jsonlite/0000755000176200001440000000000012626264003012102 5ustar liggesusersjsonlite/inst/0000755000176200001440000000000012540777273013074 5ustar liggesusersjsonlite/inst/CITATION0000644000176200001440000000113212540777273014226 0ustar liggesuserscitHeader("To cite jsonlite in publications use:") citEntry(entry = "Article", title = "The jsonlite Package: A Practical and Consistent Mapping Between JSON Data and R Objects", author = personList(as.person("Jeroen Ooms")), journal = "arXiv:1403.2805 [stat.CO]", year = "2014", url = "http://arxiv.org/abs/1403.2805", textVersion = paste("Jeroen Ooms (2014).", "The jsonlite Package: A Practical and Consistent Mapping Between JSON Data and R Objects.", "arXiv:1403.2805 [stat.CO]", "URL http://arxiv.org/abs/1403.2805.") ) jsonlite/inst/tests/0000755000176200001440000000000012540777273014236 5ustar liggesusersjsonlite/inst/tests/test-libjson-utf8.R0000644000176200001440000000243412540777273017665 0ustar liggesuserscontext("libjson UTF-8 characters") # Some notes: JSON defines UTF-8 as the default charset. Therefore all encoders and # decoders are required to support UTF-8. JSON also allows for escaped unicode, i.e # \u00F8 however this is mostly for legacy purposes. Using actual UTF-8 characters # is easier and more efficient. 
test_that("test that non ascii characters are ok", { #create random strings objects <- list( "Zürich", "北京填鴨们", "ผัดไทย", "寿司", c("寿司", "Zürich", "foo") ); lapply(objects, function(x){ Encoding(x) <- "UTF-8" myjson <- toJSON(x, pretty=TRUE); expect_that(validate(myjson), is_true()); expect_that(fromJSON(myjson), equals(x)); #prettify needs to parse + output prettyjson <- prettify(myjson); expect_that(validate(prettyjson), is_true()); expect_that(fromJSON(prettyjson), equals(x)); }); #Test escaped unicode characters expect_that(fromJSON('["Z\\u00FCrich"]'), equals("Z\u00fcrich")); expect_that(fromJSON(prettify('["Z\\u00FCrich"]')), equals("Z\u00fcrich")); expect_that(length(unique(fromJSON('["Z\\u00FCrich", "Z\u00fcrich"]'))), equals(1L)) expect_that(fromJSON('["\\u586B"]'), equals("\u586b")); expect_that(fromJSON(prettify('["\\u586B"]')), equals("\u586B")); }); jsonlite/inst/tests/test-toJSON-keep-vec-names.R0000644000176200001440000000257512540777273021261 0ustar liggesuserscontext("toJSON keep_vec_names") test_that("keep_vec_names with named vectors", { # Basic types should give messages # Length-1 vectors expect_message(expect_equal(toJSON2(c(a=1)), '{"a":1}')) expect_message(expect_equal(toJSON2(c(a="x")), '{"a":"x"}')) expect_message(expect_equal(toJSON2(c(a=TRUE)), '{"a":true}')) # Longer vectors expect_message(expect_equal(toJSON2(c(a=1,b=2)), '{"a":1,"b":2}')) expect_message(expect_equal(toJSON2(c(a="x",b="y")), '{"a":"x","b":"y"}')) expect_message(expect_equal(toJSON2(c(a=FALSE,b=TRUE)), '{"a":false,"b":true}')) # Some other types expect_message(expect_equal(toJSON2(factor(c(a="x"))), '{"a":"x"}')) expect_message(expect_equal(toJSON2(c(a=as.Date("2015-01-01"))), '{"a":"2015-01-01"}')) expect_message(expect_equal(toJSON2(c(a=as.POSIXct("2015-01-01 3:00:00"))), '{"a":"2015-01-01 03:00:00"}')) expect_message(expect_equal(toJSON2(c(a=as.POSIXlt("2015-01-01 3:00:00"))), '{"a":"2015-01-01 03:00:00"}')) # keep_vec_names shouldn't affect unnamed vectors 
expect_equal(toJSON2(1), '1') expect_equal(toJSON2(c(1:3)), '[1,2,3]') }) # Data frames generally don't allow named columns, except in very unusual cases test_that("keep_vec_names with data frames", { expect_equal(toJSON3(data.frame(x=c(a=1), y=2)), '{"x":[1],"y":[2]}') expect_equal(toJSON3(data.frame(x=c(a=1,b=2), y=c(c=3,d=4))), '{"x":[1,2],"y":[3,4]}') }) jsonlite/inst/tests/test-toJSON-NA-values.R0000644000176200001440000000100512540777273020237 0ustar liggesuserscontext("toJSON NA values") test_that("Test NA values", { options(stringsAsFactors=FALSE) x <- list(foo=c(TRUE, NA, FALSE, TRUE), bar=c(3.14,NA, 42, NA), zoo=c(NA, "bla", "boe", NA)) x$mydf <- data.frame(col1=c(FALSE, NA, NA, TRUE), col2=c(1.23, NA, 23, NA)) x$mydf$mylist <- list(c(TRUE, NA, FALSE, NA), NA, c("blabla", NA), c(NA,12,13,NA,NA,NA,1001)) expect_that(validate(toJSON(x)), is_true()) expect_that(fromJSON(toJSON(x)), equals(x)) expect_that(fromJSON(toJSON(x, na="null")), equals(x)) }); jsonlite/inst/tests/test-libjson-escaping.R0000644000176200001440000000154712540777273020574 0ustar liggesuserscontext("libjson Escaping") test_that("escaping and parsing of special characters", { #create random strings mychars <- c('a', 'b', " ", '"', "\\", "\t", "\n", "'", "/", "#", "$"); createstring <- function(length){ paste(mychars[ceiling(runif(length, 0, length(mychars)))], collapse="") } #generate 1000 random strings for(i in 1:200){ x <- createstring(i); expect_that(x, equals(fromJSON(toJSON(x)))); expect_that(x, equals(fromJSON(toJSON(x, pretty=TRUE)))); y <- setNames(list(123), x) expect_that(x, equals(fromJSON(toJSON(x, pretty=TRUE)))); } }); test_that("filter invalid escape characters", { #The \v and \a characters are not supported by JSON. This is a common bug #expect_that(validate(toJSON("foo\v\bar\abaz")), is_true()); #Update: yajl doesn't support \v and \a characters at all. Dropping this test. 
}); jsonlite/inst/tests/test-serializeJSON-types.R0000644000176200001440000000204512540777273021162 0ustar liggesusers#test serializeJSON context("Serializing Data Types") # Note about numeric precision # In the unit tests we use digits=10. Lowever values will result in problems for some datasets test_that("Serializing Data Objects", { objects <- list( NULL, readBin(system.file(package="base", "Meta/package.rds"), "raw", 999), c(TRUE, FALSE, NA, FALSE), c(1L, NA, 9999999), c(round(pi, 4), NA, NaN, Inf, -Inf), c("foo", NA, "bar"), complex(real=1:10, imaginary=1001:1010), Reaction ~ Days + (1|Subject) + (0+Days|Subject), as.name("cars"), as.pairlist(mtcars), quote(rnorm(10)), expression("to be or not to be"), expression(foo), parse(text="rnorm(10);"), call("rnorm", n=10), emptyenv(), `if`, #builtin `list`, #special getNamespace("graphics") #namespace ) #test all but list lapply(objects, function(object){ expect_that(unserializeJSON(serializeJSON(object)), equals(object)) }); #test all in list expect_that(unserializeJSON(serializeJSON(objects)), equals(objects)) }); jsonlite/inst/tests/test-fromJSON-dataframe.R0000644000176200001440000000361112540777273020716 0ustar liggesuserscontext("fromJSON dataframes") options(stringsAsFactors=FALSE); test_that("recover nested data frames", { x1 <- x2 <- x3 <- x4 <- x5 <- x6 <- data.frame(foo=c(1:2)); x2$bar <- c("jeroen", "eli"); x3$bar <- x4$bar <- x5$bar <- x6$bar <- data.frame(name=c("jeroen", "eli")) x4$bar$age <- x5$bar$age <- c(28, 24); x6$bar$age <- c(28, NA); x5$bar$food <- data.frame(yum=c("Rice", "Pasta")); x6$bar$food <- data.frame(yum=c(NA, "Pasta")); #add to list objects <- list(x1, x2, x3, x4, x5, x6) #test all but list lapply(objects, function(object){ expect_that(fromJSON(toJSON(object)), equals(object)) expect_that(fromJSON(toJSON(object, na="null")), equals(object)) expect_that(names(fromJSON(toJSON(object), flatten = TRUE)), equals(names(unlist(object[1,,drop=FALSE])))) }); #test all in list 
expect_that(fromJSON(toJSON(objects)), equals(objects)) }); test_that("recover lists in data frames", { x <- data.frame(author = c("Homer", "Virgil", "Jeroen")); x$poems = list(c("Iliad", "Odyssey"), c("Eclogues", "Georgics", "Aeneid"), character()); y <- data.frame(author = c("Homer", "Virgil", "Jeroen")); y$poems = list( data.frame(title=c("Iliad", "Odyssey"), year=c(-1194, -800)), data.frame(title=c("Eclogues", "Georgics", "Aeneid"), year=c(-44, -29, -19)), data.frame() ); z <- list(x=x, y=y); zz <- list(x,y); expect_that(fromJSON(toJSON(x)), equals(x)) expect_that(fromJSON(toJSON(y)), equals(y)) expect_that(fromJSON(toJSON(z)), equals(z)) expect_that(fromJSON(toJSON(zz)), equals(zz)) }); #note: nested matrix does not perfectly restore test_that("nested matrix in data frame", { x <- data.frame(foo=1:2) x$bar <- matrix(c(1:5, NA), 2) expect_that(validate(toJSON(x)), is_true()) y <- fromJSON(toJSON(x)) expect_that(y, is_a("data.frame")) expect_that(names(x), equals(names(y))) expect_that(length(y[[1,"bar"]]), equals(3)) }); jsonlite/inst/tests/test-toJSON-Date.R0000644000176200001440000000231012540777273017321 0ustar liggesuserscontext("toJSON Date") object <- as.Date("1985-06-18"); test_that("Encoding Date Objects", { expect_that(toJSON(object), equals("[\"1985-06-18\"]")); expect_that(toJSON(object, Date="ISO8601"), equals("[\"1985-06-18\"]")); expect_that(toJSON(object, Date="epoch"), equals("[5647]")); expect_that(toJSON(object, Date="adsfdsfds"), throws_error("should be one of")); }); test_that("Encoding Date Objects in a list", { expect_that(toJSON(list(foo=object)), equals("{\"foo\":[\"1985-06-18\"]}")); expect_that(toJSON(list(foo=object), Date="ISO8601"), equals("{\"foo\":[\"1985-06-18\"]}")); expect_that(toJSON(list(foo=object), Date="epoch"), equals("{\"foo\":[5647]}")); expect_that(toJSON(list(foo=object), Date="adsfdsfds"), throws_error("should be one of")); }); test_that("Encoding Date Objects in a Data frame", { 
expect_that(toJSON(data.frame(foo=object)), equals("[{\"foo\":\"1985-06-18\"}]")); expect_that(toJSON(data.frame(foo=object), Date="ISO8601"), equals("[{\"foo\":\"1985-06-18\"}]")); expect_that(toJSON(data.frame(foo=object), Date="epoch"), equals("[{\"foo\":5647}]")); expect_that(toJSON(data.frame(foo=object), Date="adsfdsfds"), throws_error("should be one of")); }); jsonlite/inst/tests/helper-toJSON.R0000644000176200001440000000042112540777273016747 0ustar liggesuserstoJSON <- function(...){ unclass(minify(jsonlite::toJSON(...))) } toJSON2 <- function(x) { toJSON(x, keep_vec_names = TRUE, auto_unbox = TRUE) } toJSON3 <- function(x) { toJSON(x, keep_vec_names = TRUE, auto_unbox = TRUE, dataframe = "columns", rownames = FALSE) } jsonlite/inst/tests/test-fromJSON-array.R0000644000176200001440000000303012540777273020103 0ustar liggesuserscontext("fromJSON Array") test_that("fromJSON Array, row major", { # test high dimensional arrays lapply(2:5, function(n){ object <- array(1:prod(n), dim=1:n) newobject <- fromJSON(toJSON(object)); expect_that(object, equals(newobject)); }); # adding some flat dimensions lapply(1:5, function(n){ object <- array(1:prod(n), dim=c(1:n, 1)) newobject <- fromJSON(toJSON(object)); expect_that(object, equals(newobject)); }); }); test_that("fromJSON Array, column major", { # test high dimensional arrays lapply(2:5, function(n){ object <- array(1:prod(n), dim=1:n) newobject <- fromJSON(toJSON(object, matrix="columnmajor"), columnmajor=TRUE); expect_that(object, equals(newobject)); }); # adding some flat dimensions lapply(1:5, function(n){ object <- array(1:prod(n), dim=c(1:n, 1)) newobject <- fromJSON(toJSON(object, matrix="columnmajor"), columnmajor=TRUE); expect_that(object, equals(newobject)); }); }); test_that("fromJSON Array, character strings", { # test high dimensional arrays lapply(2:5, function(n){ object <- array(paste("cell", 1:prod(n)), dim=1:n) newobject <- fromJSON(toJSON(object, matrix="columnmajor"), columnmajor=TRUE); 
expect_that(object, equals(newobject)); }); # adding some flat dimensions lapply(1:5, function(n){ object <- array(paste("cell", 1:prod(n)), dim=c(1:n, 1)) newobject <- fromJSON(toJSON(object, matrix="columnmajor"), columnmajor=TRUE); expect_that(object, equals(newobject)); }); });jsonlite/inst/tests/test-serializeJSON-functions.R0000644000176200001440000000115412540777273022026 0ustar liggesusers#test serializeJSON context("Serializing Functions") # Note about numeric precision # In the unit tests we use digits=10. Lowever values will result in problems for some datasets test_that("Serializing Functions", { options(keep.source=FALSE); objects <- list( function(x = 0) { x + 1 }, function(x) { x + 1 }, function(x, ...) { x + 1}, lm ); #test all but list lapply(objects, function(object){ expect_that(unserializeJSON(serializeJSON(object)), equals(object)) }); #test all in list expect_that(unserializeJSON(serializeJSON(objects)), equals(objects)) }); jsonlite/inst/tests/test-toJSON-factor.R0000644000176200001440000000057312540777273017733 0ustar liggesuserscontext("toJSON Factor") test_that("Encoding Factor Objects", { expect_that(fromJSON(toJSON(iris$Species)), is_identical_to(as.character(iris$Species))); expect_that(fromJSON(toJSON(iris$Species[1])), is_identical_to(as.character(iris$Species[1]))); expect_that(fromJSON(toJSON(iris$Species, factor="integer")), equals(structure(unclass(iris$Species), levels=NULL))); }); jsonlite/inst/tests/test-toJSON-logical.R0000644000176200001440000000214512540777273020064 0ustar liggesuserscontext("toJSON Logical") test_that("Encoding Logical", { expect_that(toJSON(TRUE), equals("[true]")); expect_that(toJSON(FALSE), equals("[false]")); expect_that(toJSON(as.logical(NA)), equals("[null]")) expect_that(toJSON(as.logical(NA), na="string"), equals("[\"NA\"]")) expect_that(toJSON(c(TRUE, NA, FALSE)), equals("[true,null,false]")); expect_that(toJSON(c(TRUE, NA, FALSE), na="string"), equals("[true,\"NA\",false]")); 
expect_that(toJSON(logical()), equals("[]")); }); test_that("Encoding Logical in Data Frame", { expect_that(toJSON(data.frame(foo=TRUE)), equals("[{\"foo\":true}]")); expect_that(toJSON(data.frame(foo=FALSE)), equals("[{\"foo\":false}]")); expect_that(toJSON(data.frame(foo=as.logical(NA))), equals("[{}]")); expect_that(toJSON(data.frame(foo=as.logical(NA)), na="null"), equals("[{\"foo\":null}]")); expect_that(toJSON(data.frame(foo=as.logical(NA)), na="string"), equals("[{\"foo\":\"NA\"}]")); expect_that(toJSON(data.frame(foo=c(TRUE, NA, FALSE))), equals("[{\"foo\":true},{},{\"foo\":false}]")); expect_that(toJSON(data.frame(foo=logical())), equals("[]")); }); jsonlite/inst/tests/flatten.R0000644000176200001440000000044712540777273016023 0ustar liggesuserscontext("flatten") test_that("flattening", { x <- list(test = data.frame(foo=1:3)) x$test$bar <- data.frame(x=5:3, y=7:9) expect_that(x, equals(fromJSON(toJSON(x), flatten = FALSE))); expect_that(names(fromJSON(toJSON(x), flatten = TRUE)$test), equals(c("foo", "bar.x", "bar.y"))) }); jsonlite/inst/tests/test-serializeJSON-datasets.R0000644000176200001440000000073212540777273021627 0ustar liggesusers#test serializeJSON context("Serializing Datasets") # Note about numeric precision # In the unit tests we use digits=10. 
Lowever values will result in problems for some datasets test_that("Serializing datasets", { library(datasets); lapply(as.list(ls("package:datasets")), function(x){ mycall <- call("expect_that", call("unserializeJSON", call("serializeJSON", as.name(x), digits=10)), call("equals", as.name(x)) ); eval(mycall) }); }); jsonlite/inst/tests/test-toJSON-numeric.R0000644000176200001440000000265412540777273020121 0ustar liggesuserscontext("toJSON Numeric") test_that("Encoding Numbers", { expect_that(toJSON(35), equals("[35]")); expect_that(toJSON(35L), equals("[35]")); expect_that(toJSON(c(35, pi), digits=5), equals("[35,3.14159]")); expect_that(toJSON(pi, digits=0), equals("[3]")); expect_that(toJSON(pi, digits=2), equals("[3.14]")); expect_that(toJSON(pi, digits=10), equals("[3.1415926536]")); expect_that(toJSON(c(pi, NA), na="string", digits=5), equals("[3.14159,\"NA\"]")); expect_that(toJSON(c(pi, NA), na="null", digits=5), equals("[3.14159,null]")); }); test_that("Encoding Numbers in Data Frame", { expect_that(toJSON(data.frame(foo=35)), equals("[{\"foo\":35}]")); expect_that(toJSON(data.frame(foo=35L)), equals("[{\"foo\":35}]")); expect_that(toJSON(data.frame(foo=c(35, pi)), digits=5), equals("[{\"foo\":35},{\"foo\":3.14159}]")); expect_that(toJSON(data.frame(foo=pi), digits=0), equals("[{\"foo\":3}]")); expect_that(toJSON(data.frame(foo=pi), digits=2), equals("[{\"foo\":3.14}]")); expect_that(toJSON(data.frame(foo=pi), digits=10), equals("[{\"foo\":3.1415926536}]")); expect_that(toJSON(data.frame(foo=c(pi, NA)), digits=5), equals("[{\"foo\":3.14159},{}]")); expect_that(toJSON(data.frame(foo=c(pi, NA)), na="string", digits=5), equals("[{\"foo\":3.14159},{\"foo\":\"NA\"}]")); expect_that(toJSON(data.frame(foo=c(pi, NA)), na="null", digits=5), equals("[{\"foo\":3.14159},{\"foo\":null}]")); }); jsonlite/inst/tests/testS4.R0000644000176200001440000000052112540777273015545 0ustar liggesusers# setClass( # Class="Trajectories", # representation=representation( # times = 
"numeric", # traj = "matrix" # ) # ); # # t1 = new(Class="Trajectories") # t2 = new(Class="Trajectories",times=c(1,3,4)) # t3 = new(Class="Trajectories",times=c(1,3),traj=matrix(1:4,ncol=2)) # # cat(asJSON(t3, pretty=T)) # cat(encode(t3, pretty=T)) jsonlite/inst/tests/test-toJSON-raw.R0000644000176200001440000000050012540777273017234 0ustar liggesuserscontext("toJSON raw") test_that("Encoding raw vector", { x <- list(myraw = charToRaw("bla")) x$mydf <- data.frame(foo=1:3) x$mydf$bar <- as.character.hexmode(charToRaw("bla")) y <- fromJSON(toJSON(x)) expect_that(x$mydf$bar, is_identical_to(y$mydf$bar)) expect_that(y$myraw, is_identical_to("Ymxh")) }); jsonlite/inst/tests/test-toJSON-complex.R0000644000176200001440000000312212540777273020115 0ustar liggesuserscontext("toJSON Complex") test_that("Encoding Complex", { expect_that(toJSON(complex(real=2, imaginary=2)), equals("[\"2+2i\"]")); expect_that(toJSON(complex(real=NA, imaginary=2)), equals("[\"NA\"]")); expect_that(toJSON(complex(real=1, imaginary=NA)), equals("[\"NA\"]")); expect_that(toJSON(complex(real=NA, imaginary=2), na="null"), equals("[null]")); }); test_that("Encoding Complex in Data Frame", { expect_that(toJSON(data.frame(foo=complex(real=1, imaginary=2))), equals("[{\"foo\":\"1+2i\"}]")); expect_that(toJSON(data.frame(foo=complex(real=NA, imaginary=2))), equals("[{}]")); expect_that(toJSON(data.frame(foo=complex(real=NA, imaginary=2)), na="string"), equals("[{\"foo\":\"NA\"}]")); expect_that(toJSON(data.frame(foo=complex(real=NA, imaginary=2)), na="null"), equals("[{\"foo\":null}]")); }); test_that("Encoding Complex as list", { x <- complex(real=c(1,2,NA), imaginary=3:1); expect_that(toJSON(x), equals("[\"1+3i\",\"2+2i\",\"NA\"]")); expect_that(toJSON(x, complex="list"), equals("{\"real\":[1,2,\"NA\"],\"imaginary\":[3,2,1]}")); expect_that(toJSON(data.frame(foo=x), complex="list"), equals("[{\"foo\":{\"real\":1,\"imaginary\":3}},{\"foo\":{\"real\":2,\"imaginary\":2}},{\"foo\":{\"imaginary\":1}}]")); 
expect_that(toJSON(data.frame(foo=x), complex="list", na="string"), equals("[{\"foo\":{\"real\":1,\"imaginary\":3}},{\"foo\":{\"real\":2,\"imaginary\":2}},{\"foo\":{\"real\":\"NA\",\"imaginary\":1}}]")); expect_that(toJSON(data.frame(foo=x), complex="list", dataframe="columns"), equals("{\"foo\":{\"real\":[1,2,\"NA\"],\"imaginary\":[3,2,1]}}")) }); jsonlite/inst/tests/test-fromJSON-datasets.R0000644000176200001440000000124112540777273020577 0ustar liggesuserscontext("fromJSON datasets") # Note about numeric precision # In the unit tests we use digits=10. Lowever values will result in problems for some datasets test_that("fromJSON datasets", { objects <- Filter(is.data.frame, lapply(ls("package:datasets"), get)); #data frames are never identical because: # - attributes # - factors, times, dates turn into strings # - integers turn into numeric lapply(objects, function(object){ newobject <- fromJSON(toJSON(object)) expect_that(newobject, is_a("data.frame")); expect_that(names(object), is_identical_to(names(newobject))); expect_that(nrow(object), is_identical_to(nrow(newobject))) }); }); jsonlite/inst/tests/test-libjson-validator.R0000644000176200001440000000074412540777273020766 0ustar liggesuserscontext("libjson Validator") test_that("test that the validator properly deals with escaped characters", { #create random strings mychars <- c('a', 'b', " ", '"', "\\", "\t", "\n", "'", "/", "#", "$"); createstring <- function(length){ paste(mychars[ceiling(runif(length, 0, length(mychars)))], collapse="") } for(i in 1:200){ #create some random strings to validate x <- createstring(i); expect_that(validate(toJSON(x)), is_true()); } }); jsonlite/inst/tests/test-toJSON-dataframe.R0000644000176200001440000000122612540777273020375 0ustar liggesuserscontext("toJSON Data Frame") test_that("data frame edge cases", { #unname named list test <- data.frame(foo=1:2) test$bar <- list(x=123, y=123) test$baz <- data.frame(z=456:457) expect_that(toJSON(test), 
equals('[{"foo":1,"bar":[123],"baz":{"z":456}},{"foo":2,"bar":[123],"baz":{"z":457}}]')) }); test_that("Nested structures", { mydata <- data.frame(row.names=1:2) mydata$d <- list( data.frame(a1=1:2, a2=3:4, a3=5:6, a4=7:8), data.frame(a1=11:12, a2=13:14, a3=15:16, a4=17:18) ) mydata$m <- list( matrix(1:6, nrow=2, ncol=3), matrix(6:1, nrow=2, ncol=3) ) expect_that(fromJSON(toJSON(mydata)), equals(mydata)); }); jsonlite/inst/tests/test-toJSON-NULL-values.R0000644000176200001440000000157212540777273020524 0ustar liggesuserscontext("toJSON NULL values") test_that("Test NULL values", { namedlist <- structure(list(), .Names = character(0)); x <- NULL y <- list(a=NULL, b=NA) z <- list(a=1, b=character(0)) expect_that(validate(toJSON(x)), is_true()) expect_that(fromJSON(toJSON(x)), equals(namedlist)) expect_that(toJSON(x), equals("{}")) expect_that(toJSON(x, null="list"), equals("{}")) expect_that(validate(toJSON(y)), is_true()) expect_that(toJSON(y, null="list"), equals("{\"a\":{},\"b\":[null]}")) expect_that(toJSON(y, null="null"), equals("{\"a\":null,\"b\":[null]}")) expect_that(fromJSON(toJSON(y, null="null")), equals(y)) expect_that(fromJSON(toJSON(y, null="list")), equals(list(a=namedlist, b=NA))) expect_that(validate(toJSON(z)), is_true()) expect_that(toJSON(z), equals("{\"a\":[1],\"b\":[]}")) expect_that(fromJSON(toJSON(z)), equals(list(a=1, b=list()))) }); jsonlite/inst/tests/issues.txt0000644000176200001440000000027212540777273016313 0ustar liggesusers#For timeseries, numeric precision can result in corrupt objects: out <- unserializeJSON(serializeJSON(AirPassengers, digits=5)) all.equal(out, AirPassengers, tolerance=1e-10) print(out)jsonlite/inst/tests/readme.txt0000644000176200001440000000031312540777273016231 0ustar liggesusersThis dir contains unit tests for use with the testthat package. They are intended to be tested by a non-root user. 
To run them, install this package and run: library(testthat) test_package("jsonlite") jsonlite/inst/tests/test-libjson-large.R0000644000176200001440000000071512540777273020071 0ustar liggesuserscontext("libjson Large strings") test_that("escaping and parsing of special characters", { #create random strings mychars <- c('a', 'b', " ", '"', "\\", "\t", "\n", "'", "/", "#", "$"); createstring <- function(length){ paste(mychars[ceiling(runif(length, 0, length(mychars)))], collapse="") } #try some very long strings for(i in 1:10){ zz <- list(foo=createstring(1e5)) expect_that(zz, equals(fromJSON(toJSON(zz)))); } }); jsonlite/inst/tests/test-network-Github.R0000644000176200001440000000504112540777273020247 0ustar liggesuserscontext("Github API") test_that("Non Nested", { mydata <- fromJSON("https://api.github.com/users/hadley/orgs"); expect_that(mydata, is_a("data.frame")); }); test_that("Nested 1 Level", { mydata <- fromJSON("https://api.github.com/users/hadley/repos"); expect_that(mydata, is_a("data.frame")); expect_that(mydata$owner, is_a("data.frame")); expect_that(nrow(mydata), equals(nrow(mydata$owner))); }); test_that("Nested 1 Level", { mydata <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues"); expect_that(mydata, is_a("data.frame")); expect_that(mydata$user, is_a("data.frame")); expect_that(mydata$pull_request, is_a("data.frame")); expect_that(nrow(mydata), equals(nrow(mydata$pull_request))); }); test_that("Nested 1 Level within list", { mydata <- fromJSON("https://api.github.com/search/repositories?q=tetris+language:assembly&sort=stars&order=desc"); expect_that(mydata, is_a("list")); expect_that(mydata$items, is_a("data.frame")); expect_that(mydata$items$owner, is_a("data.frame")); expect_that(nrow(mydata$items), equals(nrow(mydata$items$owner))); }); test_that("Nested 2 Level", { mydata <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits"); expect_that(mydata, is_a("data.frame")); expect_that(mydata$commit, is_a("data.frame")); 
expect_that(mydata$commit$author, is_a("data.frame")); expect_that(mydata$commit$author$name, is_a("character")); expect_that(nrow(mydata), equals(nrow(mydata$commit))); expect_that(nrow(mydata), equals(nrow(mydata$commit$author))); }); test_that("Nested inconsistent (payload), one-to-many", { mydata <- fromJSON("https://api.github.com/users/hadley/events"); expect_that(mydata, is_a("data.frame")); expect_that(mydata$actor, is_a("data.frame")); expect_that(mydata$repo, is_a("data.frame")); expect_that(mydata$type, is_a("character")); expect_that(mydata$payload, is_a("data.frame")); #this is dynamic, depends on data if(any(mydata$type == "PushEvent")){ expect_that(all(vapply(mydata$payload$commits, function(x){is.null(x) || is.data.frame(x)}, logical(1))), is_true()); } }); test_that("Nested inconsistent (payload), one-to-many", { mydata <- fromJSON("https://api.github.com/repos/hadley/ggplot2/events"); if(any("ForkEvent" %in% mydata$type)){ expect_that(mydata$payload$forkee$owner, is_a("data.frame")) } if(any(mydata$type %in% c("IssuesEvent", "IssueCommentEvent"))){ expect_that(mydata$payload$issue, is_a("data.frame")); expect_that(mydata$payload$issue$user, is_a("data.frame")); } }); jsonlite/inst/tests/test-toJSON-zerovec.R0000644000176200001440000000255312540777273020132 0ustar liggesuserscontext("toJSON zerovec") test_that("Encoding Factor Objects", { expect_that(toJSON(character()), is_identical_to("[]")) expect_that(toJSON(logical()), is_identical_to("[]")) expect_that(toJSON(complex()), is_identical_to("[]")) expect_that(toJSON(complex(), complex="list"), is_identical_to("{\"real\":[],\"imaginary\":[]}")) expect_that(toJSON(double()), is_identical_to("[]")) expect_that(toJSON(integer()), is_identical_to("[]")) expect_that(toJSON(list()), is_identical_to("[]")) expect_that(toJSON(factor()), is_identical_to("[]")) expect_that(toJSON(factor(levels=c("foo", "bar"))), is_identical_to("[]")) expect_that(toJSON(matrix(nrow=0, ncol=0)), is_identical_to("[]")) 
expect_that(toJSON(as.matrix(numeric())), is_identical_to("[]")) expect_that(toJSON(data.frame()), is_identical_to("[]")) expect_that(toJSON(data.frame(foo=vector())), is_identical_to("[]")) expect_that(toJSON(data.frame(foo=vector(), bar=logical())), is_identical_to("[]")) expect_that(toJSON(Sys.time()[0], POSIXt="string"), is_identical_to("[]")) expect_that(toJSON(Sys.time()[0], POSIXt="epoch"), is_identical_to("[]")) expect_that(toJSON(Sys.time()[0], POSIXt="mongo"), is_identical_to("[]")) expect_that(toJSON(Sys.time()[0], POSIXt="ISO8601"), is_identical_to("[]")) expect_that(toJSON(as.Date(Sys.time())[0], POSIXt="ISO8601"), is_identical_to("[]")) }); jsonlite/inst/tests/test-fromJSON-matrix.R0000644000176200001440000000265712540777273020307 0ustar liggesuserscontext("fromJSON Matrix") # Note about numeric precision # In the unit tests we use digits=10. Lowever values will result in problems for some datasets test_that("fromJSON Matrix", { objects <- list( matrix(1), matrix(1:2), matrix(1:2, nrow=1), matrix(round(pi,2)), matrix(c(1,NA,2,NA), 2), volcano, matrix(NA) ); lapply(objects, function(object){ newobject <- fromJSON(toJSON(object)); expect_that(newobject, is_a("matrix")); expect_that(object, equals(newobject)); }); expect_that(fromJSON(toJSON(objects)), equals(objects)); }); test_that("fromJSON Matrix with simplifyMatrix=FALSE", { expect_that(fromJSON(toJSON(matrix(1)), simplifyMatrix=FALSE), equals(list(1))); expect_that(fromJSON(toJSON(matrix(1)), simplifyVector=FALSE), equals(list(list((1))))); expect_that(fromJSON(toJSON(matrix(NA)), simplifyMatrix=FALSE), equals(list(NA))); expect_that(fromJSON(toJSON(matrix(NA)), simplifyVector=FALSE), equals(list(list((NULL))))); }); test_that("fromJSON Matrix datasets", { objects <- Filter(is.matrix, lapply(ls("package:datasets"), get)); lapply(objects, function(object){ class(object) <- "matrix"; newobject <- fromJSON(toJSON(object, digits=4)) expect_that(newobject, is_a("matrix")); expect_that(dim(newobject), 
equals(dim(object))); attributes(newobject) <- attributes(object); expect_that(newobject, equals(round(object,4))); }); }); jsonlite/inst/tests/test-toJSON-matrix.R0000644000176200001440000000060012540777273017750 0ustar liggesuserscontext("toJSON Matrix") test_that("Encoding a Matrix", { expect_that(toJSON(matrix(1)), equals("[[1]]")); expect_that(toJSON(matrix(pi), digits=5), equals("[[3.14159]]")); expect_that(toJSON(matrix(1:2)), equals("[[1],[2]]")); expect_that(toJSON(matrix(1:2, nrow=1)), equals("[[1,2]]")); expect_that(toJSON(matrix(state.x77[1,1, drop=FALSE])), equals("[[3615]]")); }); jsonlite/inst/tests/test-fromJSON-date.R0000644000176200001440000000146712540777273017716 0ustar liggesuserscontext("fromJSON date objects") test_that("fromJSON date objects", { x <- Sys.time() + c(1, 2, NA, 3) mydf <- data.frame(x=x) expect_that(fromJSON(toJSON(x, POSIXt="mongo")), is_a("POSIXct")) expect_that(fromJSON(toJSON(x, POSIXt="mongo")), equals(x)) expect_that(fromJSON(toJSON(x, POSIXt="mongo", na="string")), is_a("POSIXct")) expect_that(fromJSON(toJSON(x, POSIXt="mongo", na="null")), is_a("POSIXct")) expect_that(fromJSON(toJSON(mydf, POSIXt="mongo")), is_a("data.frame")) expect_that(fromJSON(toJSON(mydf, POSIXt="mongo"))$x, is_a("POSIXct")) expect_that(fromJSON(toJSON(mydf, POSIXt="mongo", na="string"))$x, is_a("POSIXct")) expect_that(fromJSON(toJSON(mydf, POSIXt="mongo", na="null"))$x, is_a("POSIXct")) expect_that(fromJSON(toJSON(mydf, POSIXt="mongo"))$x, equals(x)) }); jsonlite/inst/tests/test-toJSON-POSIXt.R0000644000176200001440000000672412540777273017547 0ustar liggesuserscontext("toJSON POSIXt") objects <- list( as.POSIXlt("2013-06-17 22:33:44"), as.POSIXct("2013-06-17 22:33:44"), as.POSIXlt("2013-06-17 22:33:44", tz="Australia/Darwin"), as.POSIXct("2013-06-17 22:33:44", tz="Australia/Darwin") ) test_that("Encoding POSIXt Objects", { #string based formats do not depends on the current local timezone invisible(lapply(objects, function(object){ 
expect_that(toJSON(object), equals("[\"2013-06-17 22:33:44\"]")); expect_that(toJSON(object, POSIXt="string"), equals("[\"2013-06-17 22:33:44\"]")); expect_that(toJSON(object, POSIXt="ISO8601"), equals("[\"2013-06-17T22:33:44\"]")); expect_that(toJSON(object, POSIXt="sdfsdsdf"), throws_error("one of")); })); #object 1 and 2 will result in a location specific epoch invisible(lapply(objects[3:4], function(object){ expect_that(toJSON(object, POSIXt="epoch"), equals("[1371474224000]")); expect_that(toJSON(object, POSIXt="mongo"), equals("[{\"$date\":1371474224000}]")); })); }); test_that("Encoding POSIXt object in a list", { #string based formats do not depends on the current local timezone invisible(lapply(objects, function(object){ expect_that(toJSON(list(foo=object)), equals("{\"foo\":[\"2013-06-17 22:33:44\"]}")); expect_that(toJSON(list(foo=object), POSIXt="string"), equals("{\"foo\":[\"2013-06-17 22:33:44\"]}")); expect_that(toJSON(list(foo=object), POSIXt="ISO8601"), equals("{\"foo\":[\"2013-06-17T22:33:44\"]}")); expect_that(toJSON(list(foo=object), POSIXt="sdfsdsdf"), throws_error("one of")); })); #list(foo=object) 1 and 2 will result in a location specific epoch invisible(lapply(objects[3:4], function(object){ expect_that(toJSON(list(foo=object), POSIXt="epoch"), equals("{\"foo\":[1371474224000]}")); expect_that(toJSON(list(foo=object), POSIXt="mongo"), equals("{\"foo\":[{\"$date\":1371474224000}]}")); })); }); test_that("Encoding POSIXt object in a list", { #string based formats do not depends on the current local timezone invisible(lapply(objects, function(object){ expect_that(toJSON(data.frame(foo=object)), equals("[{\"foo\":\"2013-06-17 22:33:44\"}]")); expect_that(toJSON(data.frame(foo=object), POSIXt="string"), equals("[{\"foo\":\"2013-06-17 22:33:44\"}]")); expect_that(toJSON(data.frame(foo=object), POSIXt="ISO8601"), equals("[{\"foo\":\"2013-06-17T22:33:44\"}]")); expect_that(toJSON(data.frame(foo=object), POSIXt="sdfsdsdf"), throws_error("one of")); 
})); #list(foo=object) 1 and 2 will result in a location specific epoch invisible(lapply(objects[3:4], function(object){ expect_that(toJSON(data.frame(foo=object), POSIXt="epoch"), equals("[{\"foo\":1371474224000}]")); expect_that(toJSON(data.frame(foo=object), POSIXt="mongo"), equals("[{\"foo\":{\"$date\":1371474224000}}]")); })); }); test_that("POSIXt NA values", { newobj <- list( c(objects[[1]], NA), c(objects[[2]], NA) ); lapply(newobj, function(object){ expect_that(toJSON(object), equals("[\"2013-06-17 22:33:44\",null]")); expect_that(toJSON(object, na="string"), equals("[\"2013-06-17 22:33:44\",\"NA\"]")); expect_that(toJSON(data.frame(foo=object)), equals("[{\"foo\":\"2013-06-17 22:33:44\"},{}]")); expect_that(toJSON(data.frame(foo=object), na="null"), equals("[{\"foo\":\"2013-06-17 22:33:44\"},{\"foo\":null}]")); expect_that(toJSON(data.frame(foo=object), na="string"), equals("[{\"foo\":\"2013-06-17 22:33:44\"},{\"foo\":\"NA\"}]")); }); }); jsonlite/inst/tests/test-toJSON-AsIs.R0000644000176200001440000000117112540777273017307 0ustar liggesuserscontext("toJSON AsIs") test_that("Encoding AsIs", { expect_that(toJSON(list(1), auto_unbox=TRUE), equals("[1]")); expect_that(toJSON(list(I(1)), auto_unbox=TRUE), equals("[[1]]")); expect_that(toJSON(I(list(1)), auto_unbox=TRUE), equals("[1]")); expect_that(toJSON(list(x=1)), equals("{\"x\":[1]}")); expect_that(toJSON(list(x=1), auto_unbox=TRUE), equals("{\"x\":1}")); expect_that(toJSON(list(x=I(1)), auto_unbox=TRUE), equals("{\"x\":[1]}")); expect_that(toJSON(list(x=I(list(1))), auto_unbox=TRUE), equals("{\"x\":[1]}")); expect_that(toJSON(list(x=list(I(1))), auto_unbox=TRUE), equals("{\"x\":[[1]]}")); }); jsonlite/inst/tests/test-fromJSON-NA-values.R0000644000176200001440000000110012540777273020554 0ustar liggesuserscontext("fromJSON NA values") test_that("fromJSON NA values", { objects <- list( numbers = c(1,2, NA, NaN, Inf, -Inf, 3.14), logical = c(TRUE, FALSE, NA), integers = as.integer(1,2,3), num = 3.14, bool = 
FALSE, character = c("FOO","NA", NA, "NaN"), integer = 21L, boolNA = as.logical(NA), df = data.frame(foo=c(1,NA)) ) #test all but list lapply(objects, function(object){ expect_that(fromJSON(toJSON(object)), equals(object)) }); #test all in list expect_that(fromJSON(toJSON(objects)), equals(objects)) }); jsonlite/inst/doc/0000755000176200001440000000000012626133701013624 5ustar liggesusersjsonlite/inst/doc/json-aaquickstart.R0000644000176200001440000000331612573053677017433 0ustar liggesusers## ----echo=FALSE---------------------------------------------------------- library(knitr) opts_chunk$set(comment="") #this replaces tabs by spaces because latex-verbatim doesn't like tabs #no longer needed because yajl does not use tabs. #toJSON <- function(...){ # gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); #} ## ----message=FALSE------------------------------------------------------- library(jsonlite) all.equal(mtcars, fromJSON(toJSON(mtcars))) ## ------------------------------------------------------------------------ # A JSON array of primitives json <- '["Mario", "Peach", null, "Bowser"]' # Simplifies into an atomic vector fromJSON(json) ## ------------------------------------------------------------------------ # No simplification: fromJSON(json, simplifyVector = FALSE) ## ------------------------------------------------------------------------ json <- '[ {"Name" : "Mario", "Age" : 32, "Occupation" : "Plumber"}, {"Name" : "Peach", "Age" : 21, "Occupation" : "Princess"}, {}, {"Name" : "Bowser", "Occupation" : "Koopa"} ]' mydf <- fromJSON(json) mydf ## ------------------------------------------------------------------------ mydf$Ranking <- c(3, 1, 2, 4) toJSON(mydf, pretty=TRUE) ## ------------------------------------------------------------------------ json <- '[ [1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12] ]' mymatrix <- fromJSON(json) mymatrix ## ------------------------------------------------------------------------ toJSON(mymatrix, pretty = TRUE) ## 
------------------------------------------------------------------------ json <- '[ [[1, 2], [3, 4]], [[5, 6], [7, 8]], [[9, 10], [11, 12]] ]' myarray <- fromJSON(json) myarray[1, , ] myarray[ , ,1] jsonlite/inst/doc/json-paging.html0000644000176200001440000240326012573053677016753 0ustar liggesusers Combining pages of JSON data with jsonlite

The jsonlite package is a JSON parser/generator for R which is optimized for pipelines and web APIs. It is used by the OpenCPU system and many other packages to get data in and out of R using the JSON format.

A bidirectional mapping

One of the main strengths of jsonlite is that it implements a bidirectional mapping between JSON and data frames. Thereby it can convert nested collections of JSON records, as they often appear on the web, immediately into the appropriate R structure. For example to grab some data from ProPublica we can simply use:

library(jsonlite)
mydata <- fromJSON("https://projects.propublica.org/forensics/geos.json", flatten = TRUE)
View(mydata)

The mydata object is a data frame which can be used directly for modeling or visualization, without the need for any further complicated data manipulation.

Paging with jsonlite

A question that comes up frequently is how to combine pages of data. Most web APIs limit the amount of data that can be retrieved per request. If the client needs more data than what can fits in a single request, it needs to break down the data into multiple requests that each retrieve a fragment (page) of data, not unlike pages in a book. In practice this is often implemented using a page parameter in the API. Below an example from the ProPublica Nonprofit Explorer API where we retrieve the first 3 pages of tax-exempt organizations in the USA, ordered by revenue:

baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc"
mydata0 <- fromJSON(paste0(baseurl, "&page=0"), flatten = TRUE)
mydata1 <- fromJSON(paste0(baseurl, "&page=1"), flatten = TRUE)
mydata2 <- fromJSON(paste0(baseurl, "&page=2"), flatten = TRUE)

#The actual data is in the filings element
mydata0$filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")]
                              organization.sub_name organization.city
1                 KAISER FOUNDATION HEALTH PLAN INC           OAKLAND
2                 KAISER FOUNDATION HEALTH PLAN INC           OAKLAND
3                 KAISER FOUNDATION HEALTH PLAN INC           OAKLAND
4  DAVIDSON COUNTY COMMUNITY COLLEGE FOUNDATION INC         LEXINGTON
5                       KAISER FOUNDATION HOSPITALS           OAKLAND
6                       KAISER FOUNDATION HOSPITALS           OAKLAND
7                       KAISER FOUNDATION HOSPITALS           OAKLAND
8                   PARTNERS HEALTHCARE SYSTEM INC        CHARLESTOWN
9                   PARTNERS HEALTHCARE SYSTEM INC        CHARLESTOWN
10                  PARTNERS HEALTHCARE SYSTEM INC        CHARLESTOWN
    totrevenue
1  42346486950
2  40148558254
3  37786011714
4  30821445312
5  20013171194
6  18543043972
7  17980030355
8  10619215354
9  10452560305
10  9636630380

To analyze or visualize these data, we need to combine the pages into a single dataset. We can do this with the rbind.pages function. Note that in this example, the actual data is contained by the filings field:

#Rows per data frame
nrow(mydata0$filings)
[1] 25
#Combine data frames
filings <- rbind.pages(
  list(mydata0$filings, mydata1$filings, mydata2$filings)
)

#Total number of rows
nrow(filings)
[1] 75

Automatically combining many pages

We can write a simple loop that automatically downloads and combines many pages. For example to retrieve the first 20 pages with non-profits from the example above:

#store all pages in a list first
baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc"
pages <- list()
for(i in 0:20){
  mydata <- fromJSON(paste0(baseurl, "&page=", i))
  message("Retrieving page ", i)
  pages[[i+1]] <- mydata$filings
}

#combine all into one
filings <- rbind.pages(pages)

#check output
nrow(filings)
[1] 525
colnames(filings)
  [1] "tax_prd"               "tax_prd_yr"           
  [3] "formtype"              "pdf_url"              
  [5] "updated"               "totrevenue"           
  [7] "totfuncexpns"          "totassetsend"         
  [9] "totliabend"            "pct_compnsatncurrofcr"
 [11] "tax_pd"                "subseccd"             
 [13] "unrelbusinccd"         "initiationfees"       
 [15] "grsrcptspublicuse"     "grsincmembers"        
 [17] "grsincother"           "totcntrbgfts"         
 [19] "totprgmrevnue"         "invstmntinc"          
 [21] "txexmptbndsproceeds"   "royaltsinc"           
 [23] "grsrntsreal"           "grsrntsprsnl"         
 [25] "rntlexpnsreal"         "rntlexpnsprsnl"       
 [27] "rntlincreal"           "rntlincprsnl"         
 [29] "netrntlinc"            "grsalesecur"          
 [31] "grsalesothr"           "cstbasisecur"         
 [33] "cstbasisothr"          "gnlsecur"             
 [35] "gnlsothr"              "netgnls"              
 [37] "grsincfndrsng"         "lessdirfndrsng"       
 [39] "netincfndrsng"         "grsincgaming"         
 [41] "lessdirgaming"         "netincgaming"         
 [43] "grsalesinvent"         "lesscstofgoods"       
 [45] "netincsales"           "miscrevtot11e"        
 [47] "compnsatncurrofcr"     "othrsalwages"         
 [49] "payrolltx"             "profndraising"        
 [51] "txexmptbndsend"        "secrdmrtgsend"        
 [53] "unsecurednotesend"     "retainedearnend"      
 [55] "totnetassetend"        "nonpfrea"             
 [57] "gftgrntsrcvd170"       "txrevnuelevied170"    
 [59] "srvcsval170"           "grsinc170"            
 [61] "grsrcptsrelated170"    "totgftgrntrcvd509"    
 [63] "grsrcptsadmissn509"    "txrevnuelevied509"    
 [65] "srvcsval509"           "subtotsuppinc509"     
 [67] "totsupp509"            "ein"                  
 [69] "organization"          "eostatus"             
 [71] "tax_yr"                "operatingcd"          
 [73] "assetcdgen"            "transinccd"           
 [75] "subcd"                 "grscontrgifts"        
 [77] "intrstrvnue"           "dividndsamt"          
 [79] "totexcapgn"            "totexcapls"           
 [81] "grsprofitbus"          "otherincamt"          
 [83] "compofficers"          "contrpdpbks"          
 [85] "totrcptperbks"         "totexpnspbks"         
 [87] "excessrcpts"           "totexpnsexempt"       
 [89] "netinvstinc"           "totaxpyr"             
 [91] "adjnetinc"             "invstgovtoblig"       
 [93] "invstcorpstk"          "invstcorpbnd"         
 [95] "totinvstsec"           "fairmrktvalamt"       
 [97] "undistribincyr"        "cmpmininvstret"       
 [99] "sec4940notxcd"         "sec4940redtxcd"       
[101] "infleg"                "contractncd"          
[103] "claimstatcd"           "propexchcd"           
[105] "brwlndmnycd"           "furngoodscd"          
[107] "paidcmpncd"            "trnsothasstscd"       
[109] "agremkpaycd"           "undistrinccd"         
[111] "dirindirintcd"         "invstjexmptcd"        
[113] "propgndacd"            "excesshldcd"          
[115] "grntindivcd"           "nchrtygrntcd"         
[117] "nreligiouscd"          "grsrents"             
[119] "costsold"              "totrcptnetinc"        
[121] "trcptadjnetinc"        "topradmnexpnsa"       
[123] "topradmnexpnsb"        "topradmnexpnsd"       
[125] "totexpnsnetinc"        "totexpnsadjnet"       
[127] "othrcashamt"           "mrtgloans"            
[129] "othrinvstend"          "fairmrktvaleoy"       
[131] "mrtgnotespay"          "tfundnworth"          
[133] "invstexcisetx"         "sect511tx"            
[135] "subtitleatx"           "esttaxcr"             
[137] "txwithldsrc"           "txpaidf2758"          
[139] "erronbkupwthld"        "estpnlty"             
[141] "balduopt"              "crelamt"              
[143] "tfairmrktunuse"        "distribamt"           
[145] "adjnetinccola"         "adjnetinccolb"        
[147] "adjnetinccolc"         "adjnetinccold"        
[149] "adjnetinctot"          "qlfydistriba"         
[151] "qlfydistribb"          "qlfydistribc"         
[153] "qlfydistribd"          "qlfydistribtot"       
[155] "valassetscola"         "valassetscolb"        
[157] "valassetscolc"         "valassetscold"        
[159] "valassetstot"          "qlfyasseta"           
[161] "qlfyassetb"            "qlfyassetc"           
[163] "qlfyassetd"            "qlfyassettot"         
[165] "endwmntscola"          "endwmntscolb"         
[167] "endwmntscolc"          "endwmntscold"         
[169] "endwmntstot"           "totsuprtcola"         
[171] "totsuprtcolb"          "totsuprtcolc"         
[173] "totsuprtcold"          "totsuprttot"          
[175] "pubsuprtcola"          "pubsuprtcolb"         
[177] "pubsuprtcolc"          "pubsuprtcold"         
[179] "pubsuprttot"           "grsinvstinca"         
[181] "grsinvstincb"          "grsinvstincc"         
[183] "grsinvstincd"          "grsinvstinctot"       

From here, we can go straight to analyzing the filings data without any further tedious data manipulation.

jsonlite/inst/doc/json-apis.html0000644000176200001440000241713312573053677016446 0ustar liggesusers Fetching JSON data from REST APIs

This section lists some examples of public HTTP APIs that publish data in JSON format. These are great to get a sense of the complex structures that are encountered in real world JSON data. All services are free, but some require registration/authentication. Each example returns lots of data, therefore not all output is printed in this document.

library(jsonlite)

Github

Github is an online code repository and has APIs to get live data on almost all activity. Below some examples from a well known R package and author:

hadley_orgs <- fromJSON("https://api.github.com/users/hadley/orgs")
hadley_repos <- fromJSON("https://api.github.com/users/hadley/repos")
gg_commits <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits")
gg_issues <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues")

#latest issues
paste(format(gg_issues$user$login), ":", gg_issues$title)
 [1] "idavydov     : annotate(\"segment\") wrong position if limits are inverted"                      
 [2] "ben519       : geom_polygon doesn't make NA values grey when using continuous fill"              
 [3] "has2k1       : Fix multiple tiny issues in the position classes"                                 
 [4] "neggert      : Problem with geom_bar position=fill and faceting"                                 
 [5] "robertzk     : Fix typo in geom_linerange docs."                                                 
 [6] "lionel-      : stat_bar() gets confused with numeric discrete data?"                             
 [7] "daattali     : Request: support theme axis.ticks.length.x and axis.ticks.length.y"               
 [8] "sethchandler : Documentation error on %+replace% ?"                                              
 [9] "daattali     : dev version 1.0.1.9003 has some breaking changes"                                 
[10] "lionel-      : Labels"                                                                           
[11] "nutterb      : legend for `geom_line` colour disappears when `alpha` < 1.0"                      
[12] "wch          : scale_name property should be removed from Scale objects"                         
[13] "wch          : scale_details arguments in Coords should be renamed panel_scales or scale"        
[14] "wch          : ScalesList-related functions should be moved into ggproto object"                 
[15] "wch          : update_geom_defaults and update_stat_defaults should accept Geom and Stat objects"
[16] "wch          : Make some ggproto objects immutable. Closes #1237"                                
[17] "and3k        : Control size of the border and padding of geom_label"                             
[18] "hadley       : Consistent argument order and formatting for layer functions"                     
[19] "hadley       : Consistently handle missing values"                                               
[20] "cmohamma     : fortify causes fatal error"                                                       
[21] "lionel-      : Flawed `label_bquote()` implementation"                                           
[22] "beroe        : Create alias for `colors=` in `scale_color_gradientn()`"                          
[23] "and3k        : hjust broken in y facets"                                                         
[24] "joranE       : Allow color bar guides for alpha scales"                                          
[25] "hadley       : dir = \"v\" also needs to swap nrow and ncol"                                     
[26] "joranE       : Add examples for removing guides"                                                 
[27] "lionel-      : New approach for horizontal layers"                                               
[28] "bbolker      : add horizontal linerange geom"                                                    
[29] "hadley       : Write vignette about grid"                                                        
[30] "hadley       : Immutable flag for ggproto objects"                                               

CitiBike NYC

A single public API that shows location, status and current availability for all stations in the New York City bike sharing imitative.

citibike <- fromJSON("http://citibikenyc.com/stations/json")
stations <- citibike$stationBeanList
colnames(stations)
 [1] "id"                    "stationName"          
 [3] "availableDocks"        "totalDocks"           
 [5] "latitude"              "longitude"            
 [7] "statusValue"           "statusKey"            
 [9] "availableBikes"        "stAddress1"           
[11] "stAddress2"            "city"                 
[13] "postalCode"            "location"             
[15] "altitude"              "testStation"          
[17] "lastCommunicationTime" "landMark"             
nrow(stations)
[1] 509

Ergast

The Ergast Developer API is an experimental web service which provides a historical record of motor racing data for non-commercial purposes.

res <- fromJSON('http://ergast.com/api/f1/2004/1/results.json')
drivers <- res$MRData$RaceTable$Races$Results[[1]]$Driver
colnames(drivers)
[1] "driverId"        "code"            "url"             "givenName"      
[5] "familyName"      "dateOfBirth"     "nationality"     "permanentNumber"
drivers[1:10, c("givenName", "familyName", "code", "nationality")]
   givenName    familyName code nationality
1    Michael    Schumacher  MSC      German
2     Rubens   Barrichello  BAR   Brazilian
3   Fernando        Alonso  ALO     Spanish
4       Ralf    Schumacher  SCH      German
5       Juan Pablo Montoya  MON   Colombian
6     Jenson        Button  BUT     British
7      Jarno        Trulli  TRU     Italian
8      David     Coulthard  COU     British
9     Takuma          Sato  SAT    Japanese
10 Giancarlo    Fisichella  FIS     Italian

ProPublica

Below an example from the ProPublica Nonprofit Explorer API where we retrieve the first 10 pages of tax-exempt organizations in the USA, ordered by revenue. The rbind.pages function is used to combine the pages into a single data frame.

#store all pages in a list first
baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc"
pages <- list()
for(i in 0:10){
  mydata <- fromJSON(paste0(baseurl, "&page=", i), flatten=TRUE)
  message("Retrieving page ", i)
  pages[[i+1]] <- mydata$filings
}

#combine all into one
filings <- rbind.pages(pages)

#check output
nrow(filings)
[1] 275
filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")]
                              organization.sub_name organization.city
1                 KAISER FOUNDATION HEALTH PLAN INC           OAKLAND
2                 KAISER FOUNDATION HEALTH PLAN INC           OAKLAND
3                 KAISER FOUNDATION HEALTH PLAN INC           OAKLAND
4  DAVIDSON COUNTY COMMUNITY COLLEGE FOUNDATION INC         LEXINGTON
5                       KAISER FOUNDATION HOSPITALS           OAKLAND
6                       KAISER FOUNDATION HOSPITALS           OAKLAND
7                       KAISER FOUNDATION HOSPITALS           OAKLAND
8                   PARTNERS HEALTHCARE SYSTEM INC        CHARLESTOWN
9                   PARTNERS HEALTHCARE SYSTEM INC        CHARLESTOWN
10                  PARTNERS HEALTHCARE SYSTEM INC        CHARLESTOWN
    totrevenue
1  42346486950
2  40148558254
3  37786011714
4  30821445312
5  20013171194
6  18543043972
7  17980030355
8  10619215354
9  10452560305
10  9636630380

New York Times

The New York Times has several APIs as part of the NYT developer network. These interface to data from various departments, such as news articles, book reviews, real estate, etc. Registration is required (but free) and a key can be obtained at here. The code below includes some example keys for illustration purposes.

#search for articles
article_key <- "&api-key=c2fede7bd9aea57c898f538e5ec0a1ee:6:68700045"
url <- "http://api.nytimes.com/svc/search/v2/articlesearch.json?q=obamacare+socialism"
req <- fromJSON(paste0(url, article_key))
articles <- req$response$docs
colnames(articles)
 [1] "web_url"          "snippet"          "lead_paragraph"  
 [4] "abstract"         "print_page"       "blog"            
 [7] "source"           "multimedia"       "headline"        
[10] "keywords"         "pub_date"         "document_type"   
[13] "news_desk"        "section_name"     "subsection_name" 
[16] "byline"           "type_of_material" "_id"             
[19] "word_count"      
#search for best sellers
bestseller_key <- "&api-key=5e260a86a6301f55546c83a47d139b0d:3:68700045"
url <- "http://api.nytimes.com/svc/books/v2/lists/overview.json?published_date=2013-01-01"
req <- fromJSON(paste0(url, bestseller_key))
bestsellers <- req$results$list
category1 <- bestsellers[[1, "books"]]
subset(category1, select = c("author", "title", "publisher"))
           author                title                  publisher
1   Gillian Flynn            GONE GIRL           Crown Publishing
2    John Grisham        THE RACKETEER Knopf Doubleday Publishing
3       E L James FIFTY SHADES OF GREY Knopf Doubleday Publishing
4 Nicholas Sparks           SAFE HAVEN   Grand Central Publishing
5  David Baldacci        THE FORGOTTEN   Grand Central Publishing
#movie reviews
movie_key <- "&api-key=5a3daaeee6bbc6b9df16284bc575e5ba:0:68700045"
url <- "http://api.nytimes.com/svc/movies/v2/reviews/dvd-picks.json?order=by-date"
req <- fromJSON(paste0(url, movie_key))
reviews <- req$results
colnames(reviews)
 [1] "nyt_movie_id"     "display_title"    "sort_name"       
 [4] "mpaa_rating"      "critics_pick"     "thousand_best"   
 [7] "byline"           "headline"         "capsule_review"  
[10] "summary_short"    "publication_date" "opening_date"    
[13] "dvd_release_date" "date_updated"     "seo_name"        
[16] "link"             "related_urls"     "multimedia"      
reviews[1:5, c("display_title", "byline", "mpaa_rating")]
       display_title         byline mpaa_rating
1    Tom at the Farm Stephen Holden          NR
2     A Little Chaos Stephen Holden           R
3           Big Game   Andy Webster        PG13
4          Balls Out   Andy Webster           R
5 Mad Max: Fury Road    A. O. Scott           R

CrunchBase

CrunchBase is the free database of technology companies, people, and investors that anyone can edit.

key <- "f6dv6cas5vw7arn5b9d7mdm3"
res <- fromJSON(paste0("http://api.crunchbase.com/v/1/search.js?query=R&api_key=", key))
head(res$results)

Sunlight Foundation

The Sunlight Foundation is a non-profit that helps to make government transparent and accountable through data, tools, policy and journalism. Register a free key at here. An example key is provided.

key <- "&apikey=39c83d5a4acc42be993ee637e2e4ba3d"

#Find bills about drones
drone_bills <- fromJSON(paste0("http://openstates.org/api/v1/bills/?q=drone", key))
drone_bills$title <- substring(drone_bills$title, 1, 40)
print(drone_bills[1:5, c("title", "state", "chamber", "type")])
                                     title state chamber type
1                            WILDLIFE-TECH    il   lower bill
2 Criminalizes the unlawful use of an unma    ny   lower bill
3 Criminalizes the unlawful use of an unma    ny   lower bill
4 Relating to: criminal procedure and prov    wi   lower bill
5 Relating to: criminal procedure and prov    wi   upper bill
#Congress mentioning "constitution"
res <- fromJSON(paste0("http://capitolwords.org/api/1/dates.json?phrase=immigration", key))
wordcount <- res$results
wordcount$day <- as.Date(wordcount$day)
summary(wordcount)
     count              day               raw_count      
 Min.   :   1.00   Min.   :1996-01-02   Min.   :   1.00  
 1st Qu.:   3.00   1st Qu.:2001-01-22   1st Qu.:   3.00  
 Median :   8.00   Median :2005-11-16   Median :   8.00  
 Mean   :  25.27   Mean   :2005-10-02   Mean   :  25.27  
 3rd Qu.:  21.00   3rd Qu.:2010-05-12   3rd Qu.:  21.00  
 Max.   :1835.00   Max.   :2015-08-05   Max.   :1835.00  
#Local legislators
legislators <- fromJSON(paste0("http://congress.api.sunlightfoundation.com/",
  "legislators/locate?latitude=42.96&longitude=-108.09", key))
subset(legislators$results, select=c("last_name", "chamber", "term_start", "twitter_id"))
  last_name chamber term_start      twitter_id
1    Lummis   house 2015-01-06   CynthiaLummis
2      Enzi  senate 2015-01-06     SenatorEnzi
3  Barrasso  senate 2013-01-03 SenJohnBarrasso

Twitter

The twitter API requires OAuth2 authentication. Some example code:

#Create your own appication key at https://dev.twitter.com/apps
consumer_key = "EZRy5JzOH2QQmVAe9B4j2w";
consumer_secret = "OIDC4MdfZJ82nbwpZfoUO4WOLTYjoRhpHRAWj6JMec";

#Use basic auth
library(httr)
secret <- RCurl::base64(paste(consumer_key, consumer_secret, sep = ":"));
req <- POST("https://api.twitter.com/oauth2/token",
  add_headers(
    "Authorization" = paste("Basic", secret),
    "Content-Type" = "application/x-www-form-urlencoded;charset=UTF-8"
  ),
  body = "grant_type=client_credentials"
);

#Extract the access token
token <- paste("Bearer", content(req)$access_token)

#Actual API call
url <- "https://api.twitter.com/1.1/statuses/user_timeline.json?count=10&screen_name=Rbloggers"
req <- GET(url, add_headers(Authorization = token))
json <- content(req, as = "text")
tweets <- fromJSON(json)
substring(tweets$text, 1, 100)
 [1] "Analysing longitudinal data: Multilevel growth models (II) http://t.co/unUxszG7VJ #rstats"           
 [2] "RcppDE 0.1.4 http://t.co/3qPhFzoOpj #rstats"                                                         
 [3] "Minimalist Maps http://t.co/fpkNznuCoX #rstats"                                                      
 [4] "Tutorials freely available of course I taught: including ggplot2, dplyr and shiny http://t.co/WsxX4U"
 [5] "Deploying Shiny apps with shinyapps.io http://t.co/tjef1pbKLt #rstats"                               
 [6] "Bootstrap Evaluation of Clusters http://t.co/EbY7ziKCz5 #rstats"                                     
 [7] "Add external code to Rmarkdown http://t.co/RCJEmS8gyP #rstats"                                       
 [8] "Linear models with weighted observations http://t.co/pUoHpvxAGC #rstats"                             
 [9] "dplyr 0.4.3 http://t.co/ze3zc8t7qj #rstats"                                                          
[10] "xkcd survey and the power to shape the internet http://t.co/vNaKhxWxE4 #rstats"                      
jsonlite/inst/doc/json-aaquickstart.Rmd0000644000176200001440000001017712573053677017757 0ustar liggesusers--- Title: "Getting started with JSON and jsonlite" date: "`r Sys.Date()`" output: html_document vignette: > %\VignetteIndexEntry{Getting started with JSON and jsonlite} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r echo=FALSE} library(knitr) opts_chunk$set(comment="") #this replaces tabs by spaces because latex-verbatim doesn't like tabs #no longer needed because yajl does not use tabs. #toJSON <- function(...){ # gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); #} ``` # Getting started with JSON and jsonlite The jsonlite package is a JSON parser/generator optimized for the web. Its main strength is that it implements a bidirectional mapping between JSON data and the most important R data types. Thereby we can convert between R objects and JSON without loss of type or information, and without the need for any manual data munging. This is ideal for interacting with web APIs, or to build pipelines where data structures seamlessly flow in and out of R using JSON. ```{r message=FALSE} library(jsonlite) all.equal(mtcars, fromJSON(toJSON(mtcars))) ``` This vignette introduces basic concepts to get started with jsonlite. For a more detailed outline and motivation of the mapping, see: [arXiv:1403.2805](http://arxiv.org/abs/1403.2805). ## Simplification Simplification is the process where JSON arrays automatically get converted from a list into a more specific R class. The `fromJSON` function has 3 arguments which control the simplification process: `simplifyVector`, `simplifyDataFrame` and `simplifyMatrix`. Each one is enabled by default. 
| JSON structure | Example JSON data | Simplifies to R class | Argument in fromJSON | | ----------------------|----------------------------------------------------------|-----------------------|----------------------| | Array of primitives | `["Amsterdam", "Rotterdam", "Utrecht", "Den Haag"]` | Atomic Vector | simplifyVector | | Array of objects | `[{"name":"Erik", "age":43}, {"name":"Anna", "age":32}]` | Data Frame | simplifyDataFrame | | Array of arrays | `[ [1, 2, 3], [4, 5, 6] ]` | Matrix | simplifyMatrix | ### Atomic Vectors When `simplifyVector` is enabled, JSON arrays containing **primitives** (strings, numbers, booleans or null) simplify into an atomic vector: ```{r} # A JSON array of primitives json <- '["Mario", "Peach", null, "Bowser"]' # Simplifies into an atomic vector fromJSON(json) ``` Without simplification, any JSON array turns into a list: ```{r} # No simplification: fromJSON(json, simplifyVector = FALSE) ``` ### Data Frames When `simplifyDataFrame` is enabled, JSON arrays containing **objects** (key-value pairs) simplify into a data frame: ```{r} json <- '[ {"Name" : "Mario", "Age" : 32, "Occupation" : "Plumber"}, {"Name" : "Peach", "Age" : 21, "Occupation" : "Princess"}, {}, {"Name" : "Bowser", "Occupation" : "Koopa"} ]' mydf <- fromJSON(json) mydf ``` The data frame gets converted back into the original JSON structure by `toJSON` (whitespace and line breaks are ignorable in JSON). ```{r} mydf$Ranking <- c(3, 1, 2, 4) toJSON(mydf, pretty=TRUE) ``` Hence you can go back and forth between dataframes and JSON, without any manual data restructuring. 
### Matrices and Arrays When `simplifyMatrix` is enabled, JSON arrays containing **equal-length sub-arrays** simplify into a matrix (or higher order R array): ```{r} json <- '[ [1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12] ]' mymatrix <- fromJSON(json) mymatrix ``` Again, we can use `toJSON` to convert the matrix or array back into the original JSON structure: ```{r} toJSON(mymatrix, pretty = TRUE) ``` The simplification works for arrays of arbitrary dimensionality, as long as the dimensions match (R does not support ragged arrays). ```{r} json <- '[ [[1, 2], [3, 4]], [[5, 6], [7, 8]], [[9, 10], [11, 12]] ]' myarray <- fromJSON(json) myarray[1, , ] myarray[ , ,1] ``` This is all there is to it! For a more detailed outline and motivation of the mapping, see: [arXiv:1403.2805](http://arxiv.org/abs/1403.2805). jsonlite/inst/doc/json-apis.Rmd0000644000176200001440000003534212573053677016220 0ustar liggesusers--- title: "Fetching JSON data from REST APIs" date: "2015-09-06" output: html_document vignette: > %\VignetteIndexEntry{Fetching JSON data from REST APIs} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- This section lists some examples of public HTTP APIs that publish data in JSON format. These are great to get a sense of the complex structures that are encountered in real world JSON data. All services are free, but some require registration/authentication. Each example returns lots of data, therefore not all output is printed in this document. ```r library(jsonlite) ``` ## Github Github is an online code repository and has APIs to get live data on almost all activity. 
Below some examples from a well known R package and author: ```r hadley_orgs <- fromJSON("https://api.github.com/users/hadley/orgs") hadley_repos <- fromJSON("https://api.github.com/users/hadley/repos") gg_commits <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits") gg_issues <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues") #latest issues paste(format(gg_issues$user$login), ":", gg_issues$title) ``` ``` [1] "idavydov : annotate(\"segment\") wrong position if limits are inverted" [2] "ben519 : geom_polygon doesn't make NA values grey when using continuous fill" [3] "has2k1 : Fix multiple tiny issues in the position classes" [4] "neggert : Problem with geom_bar position=fill and faceting" [5] "robertzk : Fix typo in geom_linerange docs." [6] "lionel- : stat_bar() gets confused with numeric discrete data?" [7] "daattali : Request: support theme axis.ticks.length.x and axis.ticks.length.y" [8] "sethchandler : Documentation error on %+replace% ?" [9] "daattali : dev version 1.0.1.9003 has some breaking changes" [10] "lionel- : Labels" [11] "nutterb : legend for `geom_line` colour disappears when `alpha` < 1.0" [12] "wch : scale_name property should be removed from Scale objects" [13] "wch : scale_details arguments in Coords should be renamed panel_scales or scale" [14] "wch : ScalesList-related functions should be moved into ggproto object" [15] "wch : update_geom_defaults and update_stat_defaults should accept Geom and Stat objects" [16] "wch : Make some ggproto objects immutable. 
Closes #1237"
[17] "and3k : Control size of the border and padding of geom_label"
[18] "hadley : Consistent argument order and formatting for layer functions"
[19] "hadley : Consistently handle missing values"
[20] "cmohamma : fortify causes fatal error"
[21] "lionel- : Flawed `label_bquote()` implementation"
[22] "beroe : Create alias for `colors=` in `scale_color_gradientn()`"
[23] "and3k : hjust broken in y facets"
[24] "joranE : Allow color bar guides for alpha scales"
[25] "hadley : dir = \"v\" also needs to swap nrow and ncol"
[26] "joranE : Add examples for removing guides"
[27] "lionel- : New approach for horizontal layers"
[28] "bbolker : add horizontal linerange geom"
[29] "hadley : Write vignette about grid"
[30] "hadley : Immutable flag for ggproto objects"
```

## CitiBike NYC

A single public API that shows location, status and current availability for all stations in the New York City bike sharing initiative.

```r
citibike <- fromJSON("http://citibikenyc.com/stations/json")
stations <- citibike$stationBeanList
colnames(stations)
```

```
 [1] "id"                    "stationName"
 [3] "availableDocks"        "totalDocks"
 [5] "latitude"              "longitude"
 [7] "statusValue"           "statusKey"
 [9] "availableBikes"        "stAddress1"
[11] "stAddress2"            "city"
[13] "postalCode"            "location"
[15] "altitude"              "testStation"
[17] "lastCommunicationTime" "landMark"
```

```r
nrow(stations)
```

```
[1] 509
```

## Ergast

The Ergast Developer API is an experimental web service which provides a historical record of motor racing data for non-commercial purposes.
```r res <- fromJSON('http://ergast.com/api/f1/2004/1/results.json') drivers <- res$MRData$RaceTable$Races$Results[[1]]$Driver colnames(drivers) ``` ``` [1] "driverId" "code" "url" "givenName" [5] "familyName" "dateOfBirth" "nationality" "permanentNumber" ``` ```r drivers[1:10, c("givenName", "familyName", "code", "nationality")] ``` ``` givenName familyName code nationality 1 Michael Schumacher MSC German 2 Rubens Barrichello BAR Brazilian 3 Fernando Alonso ALO Spanish 4 Ralf Schumacher SCH German 5 Juan Pablo Montoya MON Colombian 6 Jenson Button BUT British 7 Jarno Trulli TRU Italian 8 David Coulthard COU British 9 Takuma Sato SAT Japanese 10 Giancarlo Fisichella FIS Italian ``` ## ProPublica Below an example from the [ProPublica Nonprofit Explorer API](http://projects.propublica.org/nonprofits/api) where we retrieve the first 10 pages of tax-exempt organizations in the USA, ordered by revenue. The `rbind.pages` function is used to combine the pages into a single data frame. ```r #store all pages in a list first baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" pages <- list() for(i in 0:10){ mydata <- fromJSON(paste0(baseurl, "&page=", i), flatten=TRUE) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } #combine all into one filings <- rbind.pages(pages) #check output nrow(filings) ``` ``` [1] 275 ``` ```r filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")] ``` ``` organization.sub_name organization.city 1 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 2 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 3 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 4 DAVIDSON COUNTY COMMUNITY COLLEGE FOUNDATION INC LEXINGTON 5 KAISER FOUNDATION HOSPITALS OAKLAND 6 KAISER FOUNDATION HOSPITALS OAKLAND 7 KAISER FOUNDATION HOSPITALS OAKLAND 8 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 9 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 10 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN totrevenue 1 42346486950 2 
40148558254 3 37786011714 4 30821445312 5 20013171194 6 18543043972 7 17980030355 8 10619215354 9 10452560305 10 9636630380 ``` ## New York Times The New York Times has several APIs as part of the NYT developer network. These interface to data from various departments, such as news articles, book reviews, real estate, etc. Registration is required (but free) and a key can be obtained at [here](http://developer.nytimes.com/docs/reference/keys). The code below includes some example keys for illustration purposes. ```r #search for articles article_key <- "&api-key=c2fede7bd9aea57c898f538e5ec0a1ee:6:68700045" url <- "http://api.nytimes.com/svc/search/v2/articlesearch.json?q=obamacare+socialism" req <- fromJSON(paste0(url, article_key)) articles <- req$response$docs colnames(articles) ``` ``` [1] "web_url" "snippet" "lead_paragraph" [4] "abstract" "print_page" "blog" [7] "source" "multimedia" "headline" [10] "keywords" "pub_date" "document_type" [13] "news_desk" "section_name" "subsection_name" [16] "byline" "type_of_material" "_id" [19] "word_count" ``` ```r #search for best sellers bestseller_key <- "&api-key=5e260a86a6301f55546c83a47d139b0d:3:68700045" url <- "http://api.nytimes.com/svc/books/v2/lists/overview.json?published_date=2013-01-01" req <- fromJSON(paste0(url, bestseller_key)) bestsellers <- req$results$list category1 <- bestsellers[[1, "books"]] subset(category1, select = c("author", "title", "publisher")) ``` ``` author title publisher 1 Gillian Flynn GONE GIRL Crown Publishing 2 John Grisham THE RACKETEER Knopf Doubleday Publishing 3 E L James FIFTY SHADES OF GREY Knopf Doubleday Publishing 4 Nicholas Sparks SAFE HAVEN Grand Central Publishing 5 David Baldacci THE FORGOTTEN Grand Central Publishing ``` ```r #movie reviews movie_key <- "&api-key=5a3daaeee6bbc6b9df16284bc575e5ba:0:68700045" url <- "http://api.nytimes.com/svc/movies/v2/reviews/dvd-picks.json?order=by-date" req <- fromJSON(paste0(url, movie_key)) reviews <- req$results colnames(reviews) ``` 
``` [1] "nyt_movie_id" "display_title" "sort_name" [4] "mpaa_rating" "critics_pick" "thousand_best" [7] "byline" "headline" "capsule_review" [10] "summary_short" "publication_date" "opening_date" [13] "dvd_release_date" "date_updated" "seo_name" [16] "link" "related_urls" "multimedia" ``` ```r reviews[1:5, c("display_title", "byline", "mpaa_rating")] ``` ``` display_title byline mpaa_rating 1 Tom at the Farm Stephen Holden NR 2 A Little Chaos Stephen Holden R 3 Big Game Andy Webster PG13 4 Balls Out Andy Webster R 5 Mad Max: Fury Road A. O. Scott R ``` ## CrunchBase CrunchBase is the free database of technology companies, people, and investors that anyone can edit. ```r key <- "f6dv6cas5vw7arn5b9d7mdm3" res <- fromJSON(paste0("http://api.crunchbase.com/v/1/search.js?query=R&api_key=", key)) head(res$results) ``` ## Sunlight Foundation The Sunlight Foundation is a non-profit that helps to make government transparent and accountable through data, tools, policy and journalism. Register a free key at [here](http://sunlightfoundation.com/api/accounts/register/). An example key is provided. ```r key <- "&apikey=39c83d5a4acc42be993ee637e2e4ba3d" #Find bills about drones drone_bills <- fromJSON(paste0("http://openstates.org/api/v1/bills/?q=drone", key)) drone_bills$title <- substring(drone_bills$title, 1, 40) print(drone_bills[1:5, c("title", "state", "chamber", "type")]) ``` ``` title state chamber type 1 WILDLIFE-TECH il lower bill 2 Criminalizes the unlawful use of an unma ny lower bill 3 Criminalizes the unlawful use of an unma ny lower bill 4 Relating to: criminal procedure and prov wi lower bill 5 Relating to: criminal procedure and prov wi upper bill ``` ```r #Congress mentioning "constitution" res <- fromJSON(paste0("http://capitolwords.org/api/1/dates.json?phrase=immigration", key)) wordcount <- res$results wordcount$day <- as.Date(wordcount$day) summary(wordcount) ``` ``` count day raw_count Min. : 1.00 Min. :1996-01-02 Min. 
: 1.00 1st Qu.: 3.00 1st Qu.:2001-01-22 1st Qu.: 3.00 Median : 8.00 Median :2005-11-16 Median : 8.00 Mean : 25.27 Mean :2005-10-02 Mean : 25.27 3rd Qu.: 21.00 3rd Qu.:2010-05-12 3rd Qu.: 21.00 Max. :1835.00 Max. :2015-08-05 Max. :1835.00 ``` ```r #Local legislators legislators <- fromJSON(paste0("http://congress.api.sunlightfoundation.com/", "legislators/locate?latitude=42.96&longitude=-108.09", key)) subset(legislators$results, select=c("last_name", "chamber", "term_start", "twitter_id")) ``` ``` last_name chamber term_start twitter_id 1 Lummis house 2015-01-06 CynthiaLummis 2 Enzi senate 2015-01-06 SenatorEnzi 3 Barrasso senate 2013-01-03 SenJohnBarrasso ``` ## Twitter The twitter API requires OAuth2 authentication. Some example code: ```r #Create your own appication key at https://dev.twitter.com/apps consumer_key = "EZRy5JzOH2QQmVAe9B4j2w"; consumer_secret = "OIDC4MdfZJ82nbwpZfoUO4WOLTYjoRhpHRAWj6JMec"; #Use basic auth library(httr) secret <- RCurl::base64(paste(consumer_key, consumer_secret, sep = ":")); req <- POST("https://api.twitter.com/oauth2/token", add_headers( "Authorization" = paste("Basic", secret), "Content-Type" = "application/x-www-form-urlencoded;charset=UTF-8" ), body = "grant_type=client_credentials" ); #Extract the access token token <- paste("Bearer", content(req)$access_token) #Actual API call url <- "https://api.twitter.com/1.1/statuses/user_timeline.json?count=10&screen_name=Rbloggers" req <- GET(url, add_headers(Authorization = token)) json <- content(req, as = "text") tweets <- fromJSON(json) substring(tweets$text, 1, 100) ``` ``` [1] "Analysing longitudinal data: Multilevel growth models (II) http://t.co/unUxszG7VJ #rstats" [2] "RcppDE 0.1.4 http://t.co/3qPhFzoOpj #rstats" [3] "Minimalist Maps http://t.co/fpkNznuCoX #rstats" [4] "Tutorials freely available of course I taught: including ggplot2, dplyr and shiny http://t.co/WsxX4U" [5] "Deploying Shiny apps with shinyapps.io http://t.co/tjef1pbKLt #rstats" [6] "Bootstrap Evaluation of 
Clusters http://t.co/EbY7ziKCz5 #rstats" [7] "Add external code to Rmarkdown http://t.co/RCJEmS8gyP #rstats" [8] "Linear models with weighted observations http://t.co/pUoHpvxAGC #rstats" [9] "dplyr 0.4.3 http://t.co/ze3zc8t7qj #rstats" [10] "xkcd survey and the power to shape the internet http://t.co/vNaKhxWxE4 #rstats" ``` jsonlite/inst/doc/json-opencpu.Rnw0000644000176200001440000000766412573053677016767 0ustar liggesusers%\VignetteEngine{knitr::knitr} %\VignetteIndexEntry{Simple JSON RPC with OpenCPU} %This is a template. %Actual text goes in sources/content.Rnw \documentclass{article} \author{Jeroen Ooms} %useful packages \usepackage{url} \usepackage{fullpage} \usepackage{xspace} \usepackage{hyperref} \usepackage{fancyvrb} %for table positioning \usepackage{float} \restylefloat{table} %support for accents \usepackage[utf8]{inputenc} %support for ascii art \usepackage{pmboxdraw} %use vspace instead of indentation for paragraphs \usepackage{parskip} %extra line spacing \usepackage{setspace} \setstretch{1.25} %knitr style verbatim blocks \newenvironment{codeblock}{ \VerbatimEnvironment \definecolor{shadecolor}{rgb}{0.95, 0.95, 0.95}\color{fgcolor} \color{black} \begin{kframe} \begin{BVerbatim} }{ \end{BVerbatim} \end{kframe} } %placeholders for JSS/RJournal \newcommand{\pkg}[1]{\texttt{#1}} \newcommand{\code}[1]{\texttt{#1}} \newcommand{\file}[1]{\texttt{#1}} \newcommand{\dfn}[1]{\emph{#1}} \newcommand{\proglang}[1]{\texttt{#1}} %shorthands \newcommand{\JSON}{\texttt{JSON}\xspace} \newcommand{\R}{\texttt{R}\xspace} \newcommand{\C}{\texttt{C}\xspace} \newcommand{\toJSON}{\texttt{toJSON}\xspace} \newcommand{\fromJSON}{\texttt{fromJSON}\xspace} \newcommand{\XML}{\pkg{XML}\xspace} \newcommand{\jsonlite}{\pkg{jsonlite}\xspace} \newcommand{\RJSONIO}{\pkg{RJSONIO}\xspace} \newcommand{\API}{\texttt{API}\xspace} \newcommand{\JavaScript}{\texttt{JavaScript}\xspace} %trick for using same content file as chatper and article \newcommand{\maintitle}[1]{ \title{#1} \maketitle } 
%actual document
\begin{document}

\section*{Simple \JSON RPC with OpenCPU}

The \jsonlite package is used by \texttt{OpenCPU} to convert between \JSON data and \R objects. Thereby clients can retrieve \R objects, or remotely call \R functions using \JSON where the function arguments as well as function return value are \JSON objects. For example to download the \texttt{Boston} data from the \texttt{MASS} package:\\

\begin{tabular}{|l|l|}
\hline
\textbf{Command in R} & \textbf{Example URL on OpenCPU} \\ \hline
\texttt{toJSON(Boston, digits=4)} & \url{https://demo.ocpu.io/MASS/data/Boston/json?digits=4} \\ \hline
\texttt{toJSON(Boston, dataframe="col")} & \url{https://demo.ocpu.io/MASS/data/Boston/json?dataframe=col} \\ \hline
\texttt{toJSON(Boston, pretty=FALSE)} & \url{https://demo.ocpu.io/MASS/data/Boston/json?pretty=false} \\ \hline
\end{tabular}
\newline

To calculate the variance of the numbers \texttt{1:9} on the command line using \texttt{curl}:

\begin{Verbatim}[frame=single]
curl https://demo.ocpu.io/stats/R/var/json -d "x=[1,2,3,4,5,6,7,8,9]"
\end{Verbatim}

Or equivalently post the entire body in \JSON format:

\begin{Verbatim}[frame=single]
curl https://demo.ocpu.io/stats/R/var/json -H "Content-Type: application/json" \
  -d "{\"x\":[1,2,3,4,5,6,7,8,9]}"
\end{Verbatim}

Below is an example where we call the \texttt{melt} function from the \texttt{reshape2} package using some example rows from the \texttt{airquality} data. Here both input and output consist of a data frame.
\begin{Verbatim}[frame=single] curl https://demo.ocpu.io/reshape2/R/melt/json -d 'id=["Month", "Day"]&data=[ { "Ozone" : 41, "Solar.R" : 190, "Wind" : 7.4, "Temp" : 67, "Month" : 5, "Day" : 1 }, { "Ozone" : 36, "Solar.R" : 118, "Wind" : 8, "Temp" : 72, "Month" : 5, "Day" : 2 } ]' \end{Verbatim} Or equivalently: \begin{Verbatim}[frame=single] curl https://demo.ocpu.io/reshape2/R/melt/json -H "Content-Type: application/json" \ -d '{"id" : ["Month", "Day"], "data" : [ { "Ozone" : 41, "Solar.R" : 190, "Wind" : 7.4, "Temp" : 67, "Month" : 5, "Day" : 1 }, { "Ozone" : 36, "Solar.R" : 118, "Wind" : 8, "Temp" : 72, "Month" : 5, "Day" : 2 } ] }' \end{Verbatim} This request basically executes the following \R code: <>= mydata <- airquality[1:2,] y <- reshape2::melt(data = mydata, id = c("Month", "Day")) toJSON(y) @ %end \end{document} jsonlite/inst/doc/json-opencpu.pdf0000644000176200001440000017517712573053677016777 0ustar liggesusers%PDF-1.5 % 8 0 obj << /Length 2329 /Filter /FlateDecode >> stream xV=aHɹ9FTvMR!& ] IG  خvH~`쎯O#1S 0 w3$:;!(1>\]ڀ8Rt) 1.do~>/=O}TJ1 @:*zַRT~+4˨ s$-) 1#q\㪈zSVITbOk1:R ) Q^Vԋv]  CXaZa: eDլf@ǿE@SθK9IY82NаDxSŹGeG 1Tvǟ'4}SLGM(y&IF%b|eM*q誈R/h#G-X |,2Ś:2 i!vS0*#eK%\` THew))HQNtJ,Ξ8;* E_B̅q y{L,;&uVY5"3qO_]t#O^|:=OxFx-È)=] )wu"dx{NqԺ:9 |$ A k& AEI!\h)1. &IJIX ?m bjH(R&`+/>bCT}\n:Q֩z(ele ٙv3ig3^QUol PIȔP @m`HV.h%tw*ݭGs5@v D՟ A`$ܩ%$?^}A} V:e=訽 "ڮބ6 $6f 8M`5TsUsI`70>-qJvg٢R9Pyu$&T;(H㴁iVSXMQbGCByUL(3*O:jPs]l(}Sܔm}"R v0.uIQ *" +x d9؛,-^?-2krijP扑]Hxk H_" #̫&_5 /UG\cG`O\?Ɍp8F\" vu!c2S{ _̩s352gۦPA:, ɂs^OxyB#OUq0 jL% 7#xĨlp i*LM24ͅ~0.P=ؕ8]\klU֋,-bá[J@7ꩆ>|l1$`x$H `GC*@"ye̵?tK1fDkFF9s %k K.@A"WYt'WYr6Qk7oW7rT07nun"?&;[JmAC~c r18[Dn.2uR[m;WXf{Տ{&>'s3qU3j Kp_ၛA#0T8H&@]%$ )I rB2\;Q܈9Щ}(YZ5A5*s4_Hm9CQ>;acۥ[Dm)9QqhJ;Cxp(P"}/n dodnЏE}4[QQ;Ms%F=wŝ^@nz̼I`=<@}w{/w%`bX[kOQ \鯼}MVFI0Xh.De EnL 0F? 
ձ)Gxr{\ xsWYA,i) Zi.Z t֤_4oshq y=5|n d'Gʠ_54d?kiD+% ~Z7r|eÝYV#؏ ;{k?*4k')M& ( DL endstream endobj 23 0 obj << /Length1 1602 /Length2 10760 /Length3 0 /Length 11803 /Filter /FlateDecode >> stream xڍP- BF{pww @4%$!;Ak;3_^uUY>k&WQg5P&6fV>+Z j Z qBBe@賡" l `qqYYyc$.VfEfB-vpXYXB5r3ALE dh PZV ucaquue91!BtW+%@ [(@ƌB аrS6! dloe ?4`53g)hneo0nPF! tZM (U;?'Sԉw,`|f9s(sM@gc?y~E`px@ 7),ؔ?к:Jؕi{Tpz[;c12r%4Ѕ)I{)@qTҜr`6݂}oP Ȏ磣͋&v9\Gg |iޒm՝J.yԇI(H}i<BP&$zS7˫)_drq (^GQ,i;uQK{5Q9rUp.;,B`O !$I0W#4be:yKVD]$mPA9PdD=^֌k'iѱf;g| ![ F[7KRB9 傒2zڀSTm^ de%4\* M8q;3#7SYm?JUq8"dP ™|Ggmg4_;H$٥|M-r ^p~!؆*xӐi*y-pT:-U}Gs :]*&N 8QĂ;Hnt}TdN RגrkJއžZSUH r FHS(Ч.d_AHi Z񲢳AnyF+oi$x)Z>݊؄@%x~(mͷV,OsGDq4U,#6+,DQ1(y~b;j;[:)S/\;}cH>fe/\K^o "M紭T9ɀ>%hwC0D`-E㨒-Z1Y i^YcS۹PA֩<#~MhP@LL0()4u,[_r.v`Tznb!UTVhnm&LkKuTJeq{Mr#hQ]u sK}"r"}jՈ=..["{V`;3CO喝^1iJS5F*)ѢU3K<+Tn_ү|x[f QӷUb-TE&IDG0([3$ }m:Ȣpc|Q'ൾ7O12`&'In1ua5!#HvͯeCݭMB,HV,IP'nm_)KᎧB$QyCӚ}i#g5Vyh%L9xK7cډVSևmQStti)>S⾓z2 f/(Dqg%C{; urZLjon2Yhu[&''mO=6y~2&Gz`S ] MfxtuYK}rO$n\[}#eU33օՍ˙6}@v^#/} j̲B2e'ŋvliwL!۹zXpe?zѻvܨH>0 6ܩᩈY o~:9Zt ՒN@X'{ rB$LIK!/'܇_asv~{:Ul_'#ۗoWh S" ?p n]2jhUyV"ɜF赁pm76SI8#΂[wMBB킠BՄq? s)/吴HˡA8wKDQdݣDߛ_avzꩥz'XiDsòt'ߴRV"za p >@6sҔ<1JRBH3_Q*iFуV?``G %TDPpr 2߶s])7XE~-Vm;4>X\6{CZj]?-{k<k.&QnjpӶyJhiMO3wj/ a{`Mfd*K,J=1Fi;7{b Vz*2{:<\M/_31(\)":Huy]`M\K2U:95kV˰O-KY˽/ڵʄzF!oA,MQhu;jo햻*r2*2$!ߴ:&qa7gĆOD,#<9oh8FLGX 7h (|&rȡw =1! {x_(ias~WSUKrI&]})H -A0x؜pS/ Xӛ@}xF}8} h~tFHj4'%%^5d3 5\]E>u ʂ~XԔ! ɰyucN3A{k,cb]7x xVFGkoe6Q^eX=HhcVbRDչ/׬{Rb9ߗC߮Y5/:B5x7J΂-cogQBu 6kN#wQ:jh,w_<y+x1c"6zp$h=fr]k)ܚ6/6Č6^Ȓ]ۘu2j}#Ri?tB*6;l;l|u֫R'C;O*rT{. 5WG-q!E9ֱTm*~Vj7=|w/ԍVCPO=k[bnEIWNuDZ'J\9W:XL%*o-8 8i`X1H[ ;)|{hx]^,0ȟ|?2lZnFD_=}pHQ X< XִO#ͳ¥xfm~HX#r}r(uK : *ړVtX) x-^k\plF}*3*n{LC֙(\LfjDWxm404e%iđ#m]i RלLb%0J|忋PyKhEL](ռ4Ogy\kzQ!hA1Į͢T܉]Q>+s&[iet.w')sc \ _L4KwX>^G.D:k @|@e[:IËv_J. Q$h#8bZLG`'/z M |x:D6c;S9?Z7mgS'r+R}5 SVPv;$ >B^i>I0 fUs]DO45 #Rb} Fvs\_^a\"qXnfaK ƻA:AJMڎU5 GxϚg:f~m)- ]l 4>!%d#oB! *z}D9Dh^JKMGV )ohH]AI=Y$Ɠ1uF{]6՚V؄R^'[I.Gv}`%P ]J.*w :w|zI~Blc0)0 5+W%:)ZU^#d#CY{4Ϟ . 
sŃǎץV|sfNH)ْNg%OK̴f0^oӽ=ry +픔mzŀGFk 'R⬹?PfLƿBLoӒYz%[-* Ҹ!Xu%\oI1(]|Ǫrft?N9)kEaI/[VCl+*6ug*tL|eڧτT(#CsszsTPp אG-ޝނ%h@&`g-ѴQE2ѷr7>J8Ypwjt:j:i{@KZ@ʞp蛓MJA.Z'뛡TzM$KCi %w]`w29hw.L{r~ d5awWnοgx#~<}8)nm"J<9b<,f^B(EGܶ5 iXG}yflĞ! zs|V]#>/]밤:FuS'C>YZc4Z5l~j\B#%Ȉ9y9|5{ ~̦$yWGzF {lʙ9ڛѮ`.flKkȉ/˄|P.]OXdOf+GwGMB~mgJS d]/ ڱz *v&F\)ti_rt m!\ع޻^Vn^H?Y#Z+Vؽwas2*.#gjeZB^ ;ΊuGXsH~pdLkYPDXwze+l@=O4̺ m@AO'MޜIK@V?j4tK6}z6-?UB̰nTa"̆Ib`p^JY={1p&RK L^mFavڞCZq19/݇c\>K|8O C"֟ 7#v~ WMp 7.~5 Y l[{hW3$dӰS&.8LMSʼ_HXC}^ۤcD(ܜn/>:W9lA?r.GŶ%vĽM.}YQ%Ŏ*XY1*6B_GyG#ts'(}")c1/9/+~_ ӓĖc-FFP条&p#W&âiۧkd? )J6sR et`lz曷40̳p?1S^Wn#]]@ap:Ly*,H-}5N4sLwf﷠ڍ^7I$k<,F9&5nm ~ _ 5^Ѭ5N-^q+;f$=$l vĘQoɱ2pIZ#VpX9mάV?<Wݩ}Z.)3컼T~e%( \ud@L*9Ab}<3J>!ma?YGnjiL2K\n̊|BKyƙM[8bDrx4D]V~Dva&>4 |eܩ[ZzJT ֏y2UWEt!p~UP9{kH~+7\ʖ3 )mycF.Sݮ9 %ݢSeZ4e>A\j,iw. 1赒cquhG:]gՑyW ˩ὤAN)>볆zОefeƣj@| 6Sm4K@0Ņ%;eƯtm#NjQ+:yu'eD*0LyA19{g42₰.#O,GNmlUegRT(0d4qs F.C8^s!b \穭 ݻY<^ќ:{UMkN>& /?\nͫΚ[Xd+P\t6 G2dՋC=ɧu6b7]x=bkI%Cs qp5vyoVWaU$Go]|Q&]PXVG{\;rxz:HSώXBy/i4FpHƵ5m䔺B^k#mہMm{˲=Rqɀ~H܁¼Xs$<zkzA=B`|™}Xuwb_ku⟇JGz꿶 )8L "mW7VbiP;'t-\yRqNdgwu&Y+SϓHԵzLxO-~0״o~y?j+0@"H$VSNHw򃊶8 噡aB2tR+`ΌDւVej{ 5ѣ^D*SZ|e.p!#L2: qf 9K[[>9Шk:qZih,hn`LE@[5Љ “vE+.KSeEAe7fcց}u)ySWINbZ$չ #j~ gHJ,d*Ao*+v)ՃMuaP.X+<O0BAP2 ;!{s2Ӿ~d0[TIXrTYEWr|(o u>HtX3-Ek5@q~N91<N sɽ F=KQqc3qQQh [YY H ^M #~%Nɔi bg~>'+c&`J LW*l htZ'ȶ + X6$f:UUdMfȼ*YcQ-ʧlSN&V^ B{mg؍4zP>q8Jd1L@M2f a!ZLiKj aczsC*5)#Xd.'d,HLds3=e~<ʥ: B!v!S ćQC<\A:[w@_ q42V[x@|WlPs evtz;4L ZF\8ZӪ_i#e e\uQ+ { -nva3.w*b3mDkW/wQ >4Ҡe}iĂv4$~,p嶵˨UKZwmm즗H[6/LkO HΈaW6zYgGu THb^b#/Ʒ\&FF֨mqP7d%`PI޾ֽ)?7宭FE˞mI[8ɥ|^|X'؈1d PVQ%c̷Sn\({̸>׏6!j[)0pET`/;\$]VR]EI5({]eJ:7&g*ꖔS*xϓ$ şS0;~=f499e&AxиjCtk.6 wxu-gR0>>z@Ȕ9x~J115[ܷǢ[S$9B]Hl䦻'Ys6y_Th% h01X( }^%f9~uMIJ?oBиf즴BNcdE:xEfw.˓vGmhwt}AɼV?ݼ O5DCқV_9DdomXZ vuic̳(\-*Yտ,-﫧Ȍcytuc/ZrJJ%äc1Ȃ|{咝u%܍lQbk-C YP,2œl'gM9._k{B/A X endstream endobj 25 0 obj << /Length1 1574 /Length2 8687 /Length3 0 /Length 9716 /Filter /FlateDecode >> stream xڍTl6 *t,HKw.,ݩ44Jww)-!}gk暹gfí p# U1xxxxxtHvl&} !69]@>PP($G@nP0@ A\dNXY@!?ҎvA5,vH(7;х dCvm ~ P9Bj 
kuӡAj ܇t*@ 'p@.;G0yBaʅ@r@0o"~rA@V?JuBp@~;a`Y#t]'_küF6Pw`W'n=$ބ t= 8=z;6m@|6loDB|o Pk$ b a ?0ṗf abny%uMZSFp @ @Hy4Aп'V fY./ ΥW.Myx￀r#O忳_):8g1늼5.jsue)!A u. PXS.~/ф@?-N p.0k8 @^Jo:!#Cl7*(m 5yܚ ^?^p ^ܐA~ݿ=/( v//x_ӿ /xY"/*g XcB^ք]VISsJ|fZ7HaGHd8NH*r|{UKVύEz+8٧]^,jN>>Q;]5s/{=j{K ͬkmT ܔLrFEN1e[GGr`zL&Ug͗m7jkLץ c gDϼeg b" lhg% @t%Cc߇(-eKV'bVdt2;(j 'o.Zd^S`O=MWoHwlJ{Wo͝9^lo^#߱1E{S]=Iq Ў.S rA ;{IT*/*wzuߵZ0߸z۬k2RTѐuNNw2VDRejA"(/)ܮpNe.2ي)'j?%C,OԃH'3K6pn3PϞ˽h1n{|:l>uV&hSR.4/Jp:bC8ǚ:.p H1L%vg}< [UtģGm=w~ K#{1]PCX6h?Tp[$˥>)F Ŕ(e8Qdg !q8=Hm6@Bط8c1Ό} Ja:EXu [}9cʤS *IN9M ~3@2@j?P3!M^ }r ^'}8v?,ZI7aq_AiA]}xSم&#`q5L[k55aDjh.Zm^o#~~YgͪYb} 'ܩT+>/DH!NlHXOh)h$423&hIzZW~345L%jD])I4 tؤr]7gO֯,  n_e?t$jE;*V7 Љ>@tfQz7\/v1>lLI_e3îdhH26f˒]vOi=ה&~ng*ToB"QC,~o'/mnm|N"F}]B 昹)`9݌,+^͏g%_8'9k%'R$* U ɇW"Y p*ñ}{b =,{W#顾W]jFIgxݬw]( b즎أO)??!q GWXM~0i?l {KXK,WhŜdqMaHX]]cxfFzHGz<tJqTlڳ^{\CAk U7yw龰 jJXX,)c)I zZ&t Wpbx-lIS}|f#\55_F c-6MZK;9o?5 [Wcf 3U{#bP+mTT@U"IX+&ʚN9u9\CYWR"9ۀ(psӿ󤌿IKśu^b%@ , ,;'RBC8!5ߨY38 vDj}/4 ce"WS%X\`QӋ䬃ܵ#3]qK=t$[ռ,2ڵ˕ġ|_Ul*RWDm&t#J \2RJ zjsb0](2N8:lZ[Ay-NEn풩,J"FIUp?0ǘwi.\SE}ɜ b7|;5Üř(`9`Q]MC7RE9eH`o vS '._'c ;d좎0Rr|?xEyVI^n T;*CX7"Ĭ\pJӗ>q\n TZ~吥5r@eff6pˊw `YQ1S "7_nOyPЇr!86S@7XLsꝠ4G.mL\8g)_CEN:)/b^vmbeQ`Sx@rZq߭1]RGۮ2IpHz)JTВх ?oA.&ogwv2C0%iphlmBb2p*p5ݩE [Csn^ӿ4Dk5tyxۑnZ~ {:tx\)>U#A;IU~[|H& ŦVW#Rq"1pb=y ,U Um%@jmBxcFns}DR؏(N1-,mAEښa\v'-8ǡ0B&~!WᗟXbE׏A-yRċޡY뫊31׈~v(ޚPlԏi=U~ܖر?*nټ)f&yK{;`|Nx[]9Wm\FvLS*Mb?Ft%J~3K,0xeLJQxr7lïh>\f{{Dt3ײ'DqΊ2q:pZ:fn…h4?'ztjmpEP꣉aįfc'7ˑr|eYFf"\_.TpTk3KZeOYiV޴/o&`ޔnT4*0 pf@3" })O$3EH捹ĺ'A"/Zq&XB7 agԦ߉s^-T)'4 SvH7#d37NOG2s\qPqjՌ}@YSM7e+@LG9.rvDDWkp;KgW!w3<88q`rҏzg4ǻ h&LC=xHP⤞{{Ew.rP 17c}1قZVT JwŽQs"߅s(!P 7߭Wsúعd&Hz^pndteX"bljCɣ?sPM ʐԺ]L$j~7fy1\yM_52 ##*IN 븠O-dg77 yyE~q2_4!{n^pBV2D>}bF+8] s%"Թϫ,)Hd#9,=٪1+l^>û6.Eиgu*t\y-7ѱy۔(oe*b 59Zz{insG?/yekyN)#}ۿ+lWOrl(R1 jtp*&E‹B(QM?Dn>JL24%D0 *K]+)s0խfK2\,EШiA6| E.X843x{4VJʖ-J(]KgaXoQTEj$Ala,uw؝?!DJ7hVraL8*~C)+ u#-krqcEA_az&{Zm1Em5 Ɵݬ}V7R 0=UYvϑ s@T@el(XQ İ*%0GSQ̯aDFO扨ӒqʩL-ƒ rݭp;&~D, < 
#%w3T>d7{AWMU<'g&1FT%WfVn+-^qT-k͎m|,48EQ!f$.xI)P XUMHFބv:Ѣro|1$prX妗 Eay۠H<5חv,3UU6sr48v9f=-C)x@8G8`0Ԗ<4`쳫t {ihsN3j`zlNMc;@(JdZ2Fɲͻ]+ky̡tbԭ6]-Te91߱@:nGCf" )$(vQ~?كrkoRa咞~B̹t]㑕EѼ)AMv g?&Th~u--/Pذb2A՟Z7Z w96!%bޑpU&&/x|>'Bj/n&qwYfuۗRr8V3'1XnX^hM"SIM]y_0SB9EHэ25.8e'尷qwCC;iKkq~tK=}O.Iu>if6nKID G߲{`_HgEDuʂ6 t5=@d{3=w&zdxjeieLrsBqCަ uc#WwRiOg?9Vlgw67=\JgJ.5qe6 ˕fl{4v0KD0Ǻm*g񭴺n"fud%&w/ۧo*n TԃZqB8vLQ5Q(n1P+CcTȼ7˷g믖QYpZ#,n]e:=Pq[)9vAƞ},/זhwP襏cv_ߘi嵪16qHS6܈U"?IF~JY H{X[-Tmj(zԽ6Z5NZ~NNuMzj>ٲb"OġC_3㫚k`^D :~0鋴aθhV9s<$vLrq!ŭXlR-3YhtJq {4W=q~Iy^Y5u&fdo05Njעb2Zӽ/MvOF)ᷧ1⦿Uy|"yMl~uwf'Y_t󼺑~R:`]W綔wCi JD*_'C=13Is="NcVcoǴ;ĞklF9pjq/q؝@:Sz~Q{t3g@S2hfTd0VhgD5"uxsi[@Y }yؘӰ+&IXFn(SMÑ2oJ4Lx2+>CM_vʧ<"%e{LLrgE)@"G6XR,AfFZAPz:E HҗPLkH;ONSv[8_އ ,5O7scl»7c͆B_DYڐ\uQ)uP ȶ1}zJVH&,1k7J10NB\BIzV0:{g&;Q EC^vMNo/)FK*w4Cϒ)6@(1; d}~_2NFR@u'S5AFJ;eFazTFo^C4_p\djpД@->.zS _`hU=,ZJOUܞ_ xMCa9)k!CqNZ5sȠ1L@2$q4k>jֽnrAB5¢ĉp3vۭ{Um 6㝻^"Ƒ5&6!:@ిz%[+M \:rޱ!E(?p 7(clS^uċKeE~k]-w:8|9\ݪqNÎ#dB=j(LZ '2T0 s",W>TYxX1pHm"Z!'*b.clͽ2޷> stream xڍP۶- !{k4 A݂<>瞝soL[c5)HTM팁v gzf& 3Bo3<G#&f'oȸY<̜^:33L-M@sK?@wt0k?Oz2x2*JHjWع٘,LDe,M\i_l߷[R{W-@uؙL޿w&$bcomdkiwѺ8w&_C+4t^igAg-$,݁J&R˿M%dd׵?{-]KLL1v!#v,1dxo`f׉rpE28 ?`T0< w{cacbkO_hd0F?{9? ;ޮ?}AwVvt~~gi}]wNN^NNNNNo_J2qqt|Lw݁&sv&Vnch\`h2PVũo_`š[~{?ĩL/L` %#W~qh%C…#ty$tnWyCt>Z=J7h"8sڙڅ;47b8Zxh/ j,N8h#^"2^+} M|RVQXҫ,U"A 5#;I`*> GRE̪HtVk AjFU ԏ7 űc:>ДN"^As7O:+<)PΣ:4(tKI\PF!|^"Ox[LL\P#gޗ_}*/7fPmނ8ތ\\ZfR礋w~YR2^lT`1xo/\IKvWFzX=r1iTIJ)Dҏ pߙlݕci<^T ~T ҮI1UR8Cl71٩+mC hG]U|!_ނXA7n֗6j(_)2! 
4._DUr<7FI'ɽl*tzQ6q$iQfknuK53}H26V& )@~ Uƚ\-\BՖgqL#n-_Hn8`3G)weP~Ze\b$ vNVv IY@XHXmY*ژKt%V`zT*ԍ a$*ŞZQ10R0n/NXx9YHƒ!$ys7R Kpr覆;eICߟ|DL%ްo|-u kk邃לX#P:I\J|l̳R`KȈM>.̇E,Ӱꤜ+j'}{Cx8:Џ+;2OBah\v/3Jb+_҃6)S!bobuC$Ia#sy,ţvaSh^|0~G0@ND]%kCؑ?x/ˮo7r"Rd>a$NADm,/??!٫a[9% fSXF%#P \!נ\QXĥ9X+:WAj*hk(mSd8hw,1}]v] rHxjLp 2ƒ*asI+Nz⪏bT.c&TV#OolZ:P̊u:>VȠaIU6Gtb_CTbIVA-v66敝KSѫ$ɳú/Fl#2-Ƣ@itg\',LE~>\'WԋRG WHyINnri9WsM x \b,hWaa!)\:10ͥ ʎ\Y+.f% q^(($ 8M'6 A}'\ dasC+D#a4IMoУ4J.|0QW9 _S銥e O0(?TBB,gL,J܉cAm/ЧfcAV xkŴlP,TUm3E[rFN1 ges(c ̲QOKώ~3멌 P\,u|~D|EDuZP*Ov(_=9u\n}y/-ۙSFWH{Sj ]:W,(q ,ϓs=&l†}7SrIݻtJ9>_h6|{Қ{zoԊxV}atiptz;( w6>y9 msWNKCU2ԊG;8LWǃOg%F:fJlR!٘G0u˒Ds(Z褬"7{Re:c& ܥ,2-"/Ng?ͩoJ1鐜t*F$zxUȹ80(ßl=*i,q#\4#zTD UDl~0Ȟe/\;[8AR\W*?dw1g0XRtgNs9z>t~E+,â1خ,E/JL2X7Y6Lj6$8- [\!kHZfjDG;y@JS'ZJwx[uymwźhEӶkf0[v<},?!Ɯ[ ?K_?cmhtV8S5b8>" ubD1sJ(jzPw^ĮO;޹O(]PoLOH^HU6%בICh\ry!e u]ɹgJ][u|ǜ{Q,V06 ]nm_ β#^,Rk6?25R'=׍c|ӧwolYbيFs;'vO}?Op _Jb>4Wsz18<:|oW1 6}IQ2&ID7'eUN%-*XseDBʩEe{ῚZ`іorv9\kUԴP-Tʥ Sݝw?I`jHc9aKo368uCށ[D"?>j{8 }d?_Ӵ0ܑo x|njz47F,uS8@0y-/>Qɇ*tUnl,N-IIw^&7H (22*Suם+15O12EL PYZ@^utz=Gvv ^:b*t˒N+ћ A5Ss}7ϝL5|4-smlfMb 1 ?>BM|^µ1'DoU]3wS?O__6 c%| ; Գ#>K !ϡpH# < j0(0[Ik^kU}ɳreG" bd1?VixF<6 9Y ɍ=bagԊj3ơ I(^B3f[y"^.@;$'GnN=߬tֶK@X{_rEyed.jOQXux 2uM:c|jU6sX~-P2Z ߆p.CWG95QK˛ Z˜#%%ABN-bߤ\ڑWH˻qc:_Eˏ_ GgZR g|Wї@U(?%a A_a Ga]عIs&BSb|VV0!rN1kQ+k* Ҩ'!ًλ@'.![zc⳯ךEd| SH>Hs=ZzM!`ukEj39Ra矴EB'm#Qh:-<^Sn7ܮo){N,(~أUnx$ăbZ[Berh|Vp^/,/B `sX֠k0VozfLLf^?>쵒p}0u3)*דw#%90Nq2; aqPhX"2P%ĿY\* י@]KڤzBk qd}ۿՁ(I鴡 ?Yw^LtDڸ'r|Cd f{nqRBͺ}"4o}]0.A%1ҥ$ g. 
p\v~xA]Ug3 =.<^RA?5 Ṟqw$^X"I$NHc':ҘP-Õůp8DG s67U0'+ކ@d Ii.2J<@"vXHopm0ɽoϽ?tKQ6>.fSܪF2pIW0p a q2 b?f,c3Z$ rN=TLyvedJgmgoإsf\3Y"/Dž㲏\WKNDܛ~5( zUGtT؄u}< >MQomIqN^NO~~Tù =1Ag럯ƭ1龰ʭ@UIl#6jv4(k2#DxeP+,i%2rW!Bl@l!G6V:tH3d ڿS0ty%3e,=X)&z[Fv:q[p{cu k=\cη4A4&訹+\Ǘ+:4 G󸢔i}{R΀D8bf xU2 u_ފ{?AfA4Hf&(x5A|X0`~ͼ.RVO`uzKSu#Vb3CiP|5~D 4sZsQů7_:7?Hݭ1q+cD;Wp AOW{ ݌ [邜9]0ar"ibΠY-qfv  ʷ,&MYװ7u+ZR6 5S3 !>VgD,3 os dԬL,ʤy;O3"}4Ϛ_"Lx^}'!g,}y%c/IݶR~bN*vgƕ0P(.hR ,TFJ!7/7Sͱ 2=,gz8* ).NVaF[횅gk4EL}i-Bσ-tJ~ 3!(OnHdU3)Y%/,mְ& g9ш7?"9dė!+rށAދg1*OA.mb]/ }bG5!auRK=}h~l3~*g+9 !{"Q3繋)QV|@R)m@ܾ9Ї/>e̞Ԧ eKuC\3!@rqDȀ']F]ݢAXgeb&7|ؔ/>8~Q("LW̔t~ rQc!,a+ Ο{zT*AD$H ), 86EHS4Pǯ9]@ZNR))"LEJb@7+ö5ӂ_FtѡSS .'V(` tz__Yq@+~=GY wnfWv6\"6Q[}yv}w씹Ic[+i4yd}д`$[XmUa>ÕD3xD0q}"فn YQO\].'ӫ/~_ F C>P^N2 %"^tbNF#]kr{ ?n3wS[d/4*X\ ĀԊ _z6(~FܪF=45R?ʱ[vjX4iN5! )G ޖ&8A Н!+Rg'g\S)-&wĐʑ}* R*]<:hCo|T.kbg-OsljxGT9LnuZ%H8+\L nwXElc%x,Ñ;LZ3 Oc1PX؞~?ya3IgCC"Mz"*=Emk fJ^/= ga1.T'ikMr5pd٣L74"v L #X~) l[5VZDöM]QxzUEey$IG>]`9epQ䇗 8-Ѫ$O4eA*NQzɟq~&g>g59! *Ŵ9VE?< ,8{nFz]Fξg<<>Y/,FhF X]HSMmj+@5C9ˡ#L-U&Pݧyu}VIyNIuУGO؜X YOƽ`i&Pp())K=S;җAHTYr4i0劒ČSS0a;0%D3_=L]oQI'+<7cDgBq NjD/!z*5vXT-$Yimj\W#ţtjq> 9ѨVnw0 es HDU:\k)clcTx8h26]0)muyu(~IކMR8ac.X% Eܵɀہhfՠ9ltm"@Ju]ڝ9q6&xucg|B(;4 &EW|*TV+5Dh]}Qx(sMKٴsoèk#wG+D`lId?20_StU%,:T:|Uf7q/43e]14{?dgZv)h&ʗ g7?SH3E3LIܚrfm/ixdƹUx[:nϪx7_AC*781[ճoI[^1(>73mڽVq9 `biFvP)VCSh"_IlSkdf+U@!/+&ҁ'pòH(۶t'/-3>TǶk[=הV-с~_|cE)>ddh#YvV2Vx<ͨQ~0n1ZFA ӚG=Vծ&úy5!]zv~dWҁkN {ihP.E |9&^׾x<\uC3 ,{;qp ,2pKBWUqܠ2Gvv?+.CV}6ß9vխzjO,[X|>tk怫bXk(=AqWU/|&s3EH_j\b||ǁ}#F94WQ 4y_;v*}^$O.y)Ï> \$4Ѡ>&oaHV>f+]ӯsEz2,Awr6 kk_ARP"ƁuE#*lj!+"n@r0oX/ (KZl|!.T߳6xe@vk!ɢGF ?˜uS[t n-&z0%p~wFI]*i<)}Pڀ"#6zJDbVeAxNP2]LՎqqCf+ߝ VM.ܝd%D#7$Zk[1cajLظ**,:,R!\jǗd-Z9RLZ5kv]ؔ01V+/B>CОYS%x%z&cSBFʰeς"_߀ظv5sΈH߶ :_Gi'mQ1e˺=b'o6`#mcAOFj,;QBl[ћ*"0OLP.ؠed[-L3XQ4->q@ތɠӨ]%hLH>bjVZ;`U11q Bs3:J}[:]=p%(LF _zv$r<֕0B BGů*kگɶ(8ۭf??)oEǮĽꈓ{wE_pBRhQj06DUn[#a]tTp$w H.I-h?p eag4oИrlnl 
ZA^9SmdK$M?K踞؀ZwphS/I9c~[T2aPm%B99ׂ{/"ke:ĩ{ezu[A>]+P/v=n?TI%/Cu(ĵ3X%ۮ6~u[t=_ԫ0PS#}>uX9ZK9dٟ>N7$5{3Ccw&)]TLIvn@CzX6\qWyn1t &/r"/ S} a\̿Gx `UM|ajAzJ f' cg9UѧM&7[d-).HT)]76NJ!QPtgҥ]_ʊ d&RFq oXgErJ Ԙ{.='Fm$9[qw)vЩx.(|!wOҀnS{WW0t. `3R fw9rmec-`DO&w."i\++a? <ކg4Rl:F0uWf\ň"^`~ӄ{5l1"Ah9Zi=gUj q %3Dy_ׅ+B>DiLi7Q)wz!s^]֋V%YȉX.c*!u` p P#hZ<tòsm`K endstream endobj 29 0 obj << /Length1 2497 /Length2 17422 /Length3 0 /Length 18875 /Filter /FlateDecode >> stream xڌP cݝ@qwww[qwwbŵkPܝ>w2YP*1ٛ%\Xyb ,ff6FffVx u+NVvsd. C{; `ef/@ :S;x:YYX+ڔE;@dejlP0vڂ2M.Łݝ֙Bnb P:܀f-mOPrB  lLv W;3&#PrX?􀿇`ad'޿Ylljjo`lieg0$]<\vf mAnV6& J7HAݟ3~YLh >q+')hLG;{w;"s+;3m:0iY9e,.fffn6V0d@ԃv6v\\V/gaYLVv@`;YytAc0~M03{;?1%1Y[G)*jf`08ظ<llw3XД[ {=hK[ 9MAo,?J[_zml< M,je\A bga %=g286N++xiRwZSI>ʂ'3tOZO"triF*`FJsh3|*>,.M 1Y;>E#~xw kѼ5BQ&[ytN_B5CQd<ڼrЗ7m ~:}:<v'l ,Y3L䯎K4Ǎ+z~Y׮MÜ<]Cbtèw0f:;?fa/S1Z/imskF>MZxb+9~QEӔC1/)'V%%𤤌-=FgՐ9>s֍IJkIgpxQҭH5sE>JW ;)t")v@jf^kXjQtS富/&[+DfC%FPЬOdAW4>C'8U. fieY?;Zϭ(NҰ1Fhg#ՙf#5f$X]$Z f pX>K 2\:bd>ECвX3}a֫nukV3*=S0<%ƕ(vj5)S!m[yÐ8b/.92sWnٶ ɸɺ#}-#@)ݝΠ;GI%d_}OG fL0g~][(c=@LfxϾ(r`ԩ"3 ;DFB4q_c&', Iɾy1fJSu됇|Z0US7: ^ ϵ]'m DZ5 ug1;_vV7z'> 7ʼ-vUM#{ &Z[*{Ml!ٻHWfp,0aI(~i 2xvZ/l ʣth;:S!14z^j l ڱٻ_`{F457˼"{RUg$ej?z\fY[J'2Rm_Te2F@Q;8T48_6H4w洯m kn)Q"&2 HQq~ *a/-`RMNK{L@Xb\%eґY!7K.pE`VEoE3»7m)T!ȎKQ! K~X$]Պmcf!6mW4eL֣1{yd^#.VIs/6P~E'0EYר댗|ɚ՟]w+Yun5hg+?ETާۄp5tm^V[CiDkIلJ2Ewi|SSW_EZvܴ@ޕpDq! Ǣf1v{/QK稾T6)3{u=J2m\xYLP^ fNH]$\KW!wMj9;Ty^xm'&%M`34ܗNzא R$/!*?e?+>Hk~Vs׸>eO:HiUmF MK!5ǯkgƾ،pXvQA~kN瓯Ib_&![¦s|&v#8"3Ǘ[Hai}=Xm>:!3pS\fJEc!Qa xuk5880nĴd G<2z҇8b,Q"vM,쨸FG*kjIq%¨^CY;acTx?u6շ\dS&s©d*SYXIZ,w|T73 JQ/~?qH"Q ㍧ H lɼ8 ݜ>(}. 
7N= ް`(W=`;T |-#5{N$BUyF:^RqgwixogMZM H?ēѝ:̺ٗ+80!۳9XtVoUߵ^3flm2IgKԨ%7ߌ]ޏ5v T |&xܕ_q"c?D_zH`E-PӡW| ԍӧ""!̱,J;^/J \Y XiD7/vLʽiU+ԖUhIlpř`XW`Bw~-%de+1d:WjkJNc[hѺ*~,$s3]nK,ьSvG'Lϒ;.?(xïe^[PWYH65y(AmX~O"L)dE֖5kqMM N\K`O\'[Z(%uZ;OÛa#ѝ%\^#̑݁=}gIfՄF(=.U{DI]fۃ$4p+O=|mXH,Ws'oV<"Y J,Pᗳ/k4; f,k*:z#Gc"S/oh`^~ւ*/UDSOSB9q;8E 8}ۄqi6Θiڻ?S3߸HEǺbmG!o]#ZCԄ&u+: r% ."̌rH~ÛtpݛRmn1|b`N;Z7H!_!:Ţ"o?~6*?05$,[p:F$e&W82I.UK36˒ٷBhAt͖ط㓠H$vYEXd/Ao滾ՒHTNuamk2Gq]9";B [4|Yʋ $RlimXF[Ļ!`E󦕣 @y~4pfbS)V'@T-u]],u%[i9+bb3/X<?H@Bq<7xGd-kN?n t&.U,nj-K}u޻UYnwk_6'~?"L]L8Am$6 riK'<\*p L_[,ZaE'Q.Mlդ%ʊT=V ;an< X4iZmS iJc0ЗEEF6Λe-L$2}Ck.L-R,]:: fg\!dž gb*w~$$=zd)G`n S&nYma0a-RAgXIʽ_(eq33NQxt}ǏKPM26nʿV͢u!\ڽfwf-LKxR뎴ոOѽsse?b9]R;edsфlRFkN, s%4Z-LLh!c|3DJdnã(rXfC"V6SB`y`:w?ůd8)xH$}mŜ%]q_|Ҩ-ѷ5pOZQeփd)_54adFuoI5iaΗ$3d:8Y65x0F8^  #)6U> q,(*caV8%?YYy&=A9lFKBDJ5n"j $\jei0y5X|`3 Zf}vGc,Qם@bF2uZ E$Xs_oz/ǽ WGA|9&&yF况RN%&?ՙdaݣiXNip( 7\A>6?hVYt'CMK$ r^H߽= ~|#XV=v |{֝ia'MBnc)Ȼ^-ZA1C%LQG8{-?sضR@?-jK^^\Pu3Ϸ.VmEm1 f~wGD9;Hzf$DϯXm>܍ dyTYOXޣCϨ2RF/Mvx4+]&'EJ-|NzZ;oJv7I&g<_Jr+tU{>&i<9`~)>jXMla֔NAPW8RH XoQ9I(!.VbFwvB)Z鯍_Ӕ|z}O\GC)2sɉ8RfZk^o ēW)BTsVyLV)UܐA? 
4 efo,/%ńq3]%Xenj%rLW>3(lv[^]CB7_*'ˆ2"5`'[; |W~9mjѣe47dQոVC.lV^.ZfCg`ItMX$-{~W3Ӽ@w-'6a8f+ Uuq =v ;QD/TRk'*G0\WwTr XqpNO݋Cj\B8!giWi+iqWG+1bfx!>߬UR`Lm ӹcV?HSU8y_ڢ${@~lx#k>Nӿ3UNK.+^nu=#ezdbך2N+c6zμٵ=>ݧU3oc?(׎f?vШ!e4rU9s_璟DUbPj-NxSnWwiϪa}Կ罁taWБ=a tMm$Tvv ^ XxoM1%g8t 5kD |FQʘ 7+ a\+I(BH#6^kS 9mV0&E]F]ZRu;m#AN1r͵{QX⺮y)XD۔G>yfAGn̗QAQ}`"ݳZQcSnNUS#vSKB{{ حΑFgy=+;favj6J61 Ullj|H7 P@ ]Ižuz/[Ox}s*!ߊr]GPcGh4BZB"7}eiؠ _&Z 34+;sd"*JyȞ3=H56hTQ A!ȭY`mRt+ z[pK =i#TsͿcMXq}%*=))-W9fݩ,愪B"GF4[C#:76&Q_'}dWGC*v{L$e=E bIX[bxJca+F"3=uT4Cd*73ܲRi+&V 3.ae{%PHK\64,MǿJF*ڣlFN9HD?ްGg6guJif:@yaR>V yMWIi}|&7aqGWD5Dϐۇ+9mb{X@5K0K=+/)4GS?WfTbR-;(֑T P8 \5A)iMh 4Ol pw"3ƤazJ1ZU_G ׾nu6.$3&6~y6T%U6ֻ^5ԿBqn\(J΂ZÕ8=D!4ESg`ïPg gBuQAY/irk]ۊ5Y e]h6uEVj; c+Ryjj_3y:`?q9iu`d-&=U"WXokWT@)~)!Uu= omi[*D> O#/K,.;dˣCI:' EN_q!={>]S!ɸiZ)-ҥ0dB}/.AʋT.j -Y1E M!H'fW`CR^9dՎ˖@(C쮕Gr-ԙWVr+[7dW;7w?`p{b[pյ(+:ʾ \ ^ 0HXa(\`I{s".2g\Ϡ<5WoA.=T!ӏ鐥+|l$;`?iI@+kvk(щ僾"3`;:[IED6_w^24yGğ_4*:2Ifۗ`'t|SbEL,;HC;*t#PYQF~G:tE@R-,-}~V 잚bнȤrӟ!#1|4% 5|Q3go/'S]fFPMR :`.uw}.FBAR CQvw1T)p-F NSbKzE~s\^( aq]_Տf_Ne$ik`UY+.inyzHS^s9EpE+g.[-OW> Yop>=U]Hͅ8h;?(Ý\վL %egҎsP8Sr2Kc뿒WxP봤.dGGqйBsd m?95o}jl5 t%tҍB26lnp.~!6*A9Lni;ʕN,J ЛZByK{V},}-şG Msi;dn;-`3eMMC_ǐ5.<{-o8<1Ɛb*na18^P>.ao~g颅֯nP<w.!5a~]]KC}HB}i,a^5eZ k XCc=%+gܖh!od:/MX580.c:lFYn6dC8e1R,:׬1%#nL-}qT#m)>`h5Rg=| t&djtZo-չdJEt|1vܱugHՍF 9S$͟'UX"A?hC 913qm/6<ԳPs'ٰeIU>4!N.^#eK/+9ۺ\ZLPw#sޛE]MM+9gED=1:(Ba3أ¨*L❄G 3uUZZˡ iiw*vJ:*ާ ~*) cRe^F 1iТ!0qn<`S==从/kB6_=9!Ma2N`јg_]k,l[ɸI)Σ5\ѭVBd@R.EQJ/Di"ɖިRS^Tbƀn6Ni m\ 83 krxZ*z9gE@jh#aIn1B[ЫCD_&@!3Ӟ1@mgp7,&7o<WܩVN|lօYc(۬67^^ҚZ *Leͧ'Gӯ\,_9q] ,89rBm\m\y&l%eCcaj|C!ٞcz%qfWڇ C:0湤/93rpK 5 H{YS<y>1s8ςk ^!8sw/9׸"[7ڴo\jc,6X rn6bBEtRr=lM 43GP7hw_EŠQ|ʼnk;*Yl#qd!=k-W0ۏQ$dE P^LG#AHv/zNĶ: {TZ 8F"X>`ٟz/%8@eqt(JJ.4 r?edT@RdX91"V%bB *t[JMOq#`)rHZ}`᫸kޜeȒ-wI_%HԪih0Ig? 
yyP7Kr7tι+y&* ^e|T-h=9 &w_ rCS{ JiYdӽSMo媝zdC|Zo9Q(2˚+X3ޖ20T|UԻ5)7P̏Br,PPd-JCyLT]3N͒Ԯ?u#?.L.{I9s5TUJG8NU!r@0tZm q_k+"{YpJF}C@bԶ* 549z +R`yS05䛢VAĤ,J8SAXȳ'}B?Ll90Z;g@P*2׽rRYAnl]@6g 7#*n xౌGt04ZetZ3,"tŠWh KP5]ƚBċkfMnzyp/Mu% ] F5(,:ޣÈM,h3Fmk.~Mk3 Ɯ(S9p{ r,ơe⻴W1_Hي(YJ !VIH?#;I@c"C%rr&.]dP݊ip2[:|T6p3!= 6Ub@jb(dİ(+Z`NM? q#fe;>lww`02@ڢP f\+R_$lh3c%QLpQRRoќD- krWn݁B>~! *cp3~QSaT3SE,VRWEOMtϺ-~`~FcӞ_HTw43d1| 4o9E֤Cjv*NBqw,dRET M9,(цz_$y#?'(7BD%y3yrŶw1oZ:#|j .; //=/|T -l4''9"%,骢\ ,=i ՙ0R]i>z:k"PV}s250}q.B8/LJtB:6Ő#맸z?}55 &{WJ)BD1SP7++v{bY$v_iy>CbU9?mWVћv" w:㛨#4#;WOG@`Vv&>u=cZ?WSԬCL™ P^|*3IpЙfY:f<s*oPY͚fb+UR4ix{3NjBqUI<+/"qklZ#>#IE 7O`9ֺP n?"}id_ AmqK9( (l!4_[!h5q% tri MYŘ-ZMJ,{YבD8̣IoFx? aKK% *įu|#? 3:_/S~4GL òo*~ vxX]V!L!Hp=RGV0iD[gܠj8o"Wl8V?:u=Rai0}=4Lߞ]3 ה_swI'[ʹO)׏YVFwS=,؇MAKDkN/ɓ0%4x* ep;N.Vxg:75 yաBy[Wh)vow7QtN88dfK]E;ƽc0>_t?S%f.>[[o[7̟#ry+-p\HFN9(7o'ьq\tkL5{XyTF:|»04|Z~yU߲84'@wIzH9NO( lrŠ(\QQ25eg~Swf%8SQuR?:"xs9aٲǐ^o8fq%$!ZN(wP`kщ]sqB{ջΫ|hvK#lS'pUM6U?WL!JNɎE< 2)C[[OdTDI5[jyߐBB:En.4-$y;&ef]WVRʞݫCaiy;`et1Zߋ""w^9dppbee_Q/ND(葊TO5p[BTJ%S' ݗ#5}-/[+eܶ?FZnA+t|/#?H.-*lt{i&4 'i41TK9dUe<2\o>̾dæ endstream endobj 31 0 obj << /Length1 1410 /Length2 1769 /Length3 0 /Length 2676 /Filter /FlateDecode >> stream xڍT 8TybRˊ..j! rB[c951gΘA%JH*d ElߦJn;)*LTvϠgy6t X>>t )4裟a"E@0>;6N]Q8nt&4G 1;W pBHDaalj>_ ff&&`l9lF90G|Ql0&*H(PB6 $p +7v(4|h"p )b a]{L]&`pBTn2[QFƓF"`$`\(l"ga;6Ml8a"21#\ ! 
q݉$Ń.OWFE`r.g_cfjh !>U'" nb04 5h(;8&" sp su &l1X i耦}zJ0"+:|M)hkJA0 173_V`ÓS>g:"<M Kǁ'@.J48ă}d6;sJoMQ{b\sƸ|\n>nې[">rpMɺ;8Aܒq CwYg;mx0CxcF oss[rz%?mT7zT5ܴƽB;l^2aE|7\oM\-~dKRȱmOd?|{NGvnNidK|E*HU7V+)Η:=ϻr +%YlV;̏ȟ<"K\4ͽ63:MQoysJV[gy8VdjgT'lXޑX=M\?̞1ެ*-[i]{uy[ܨr,0F}& %9F~)CIY4{] "&Mɭ}K=ּ&K9pm.6 _:婻I:]+X,,sOT8f͍^k\.ie<g¡+Z:eрVrv|jY7MsdF\ _0w?0Ͷ2ˋ^67uM*.>i_]*e>~)OE-jY\{F)ׯ=TS ;(QӬ͜iymazqDD|&5ڈ1552 i :/G_r":nQI]-ϯ^P'n0cw=rpT^ѵxL-Wd%(S Ǜ:_,+x&12۹To`E2 endstream endobj 36 0 obj << /Author()/Title()/Subject()/Creator(LaTeX with hyperref package)/Producer(pdfTeX-1.40.15)/Keywords() /CreationDate (D:20150906171159+02'00') /ModDate (D:20150906171159+02'00') /Trapped /False /PTEX.Fullbanner (This is pdfTeX, Version 3.14159265-2.6-1.40.15 (TeX Live 2014) kpathsea version 6.2.0) >> endobj 2 0 obj << /Type /ObjStm /N 28 /First 204 /Length 1704 /Filter /FlateDecode >> stream xXs6_n% or8c夹z@KFJ-+u[fnF~BK i0B\¥\H)S‰r !2Bc]!/UB'm&,^,~*!v)/H@*/3IcTy-,5B:U@E;)4謃ʠ<}*h,t 5l;4 7tR^ y]+rЩoe3- #?N@X u1X@7r<FƋ@9&0i}JL:qLR)LF8$<8P8ҫj> ioUG7]hE4uROˤho>a"9>rW7x-BHPC-\n]Ro 7k ;i#\ wsb/i̒? e5m[䣃zdܕM?N=XbFI~[u;7)g?2}Y}=>Et <704BFF87B46D06EF8C14D7C779018694>] /Length 107 /Filter /FlateDecode >> stream x PEOD ?0CVbv⛓dl Ls B  Y968#NよjlVz[N˻"am%\O V endstream endobj startxref 63770 %%EOF jsonlite/inst/doc/json-opencpu.R0000644000176200001440000000026612573053677016411 0ustar liggesusers## ----eval=FALSE---------------------------------------------------------- # mydata <- airquality[1:2,] # y <- reshape2::melt(data = mydata, id = c("Month", "Day")) # toJSON(y) jsonlite/inst/doc/json-mapping.pdf.asis0000644000176200001440000000030412573053677017672 0ustar liggesusers%\VignetteIndexEntry{A mapping between JSON data and R objects} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} jsonlite/inst/doc/json-aaquickstart.html0000644000176200001440000237017712573053677020214 0ustar liggesusers

Getting started with JSON and jsonlite

The jsonlite package is a JSON parser/generator optimized for the web. Its main strength is that it implements a bidirectional mapping between JSON data and the most important R data types. Thereby we can convert between R objects and JSON without loss of type or information, and without the need for any manual data munging. This is ideal for interacting with web APIs, or to build pipelines where data structures seamlessly flow in and out of R using JSON.

library(jsonlite)
all.equal(mtcars, fromJSON(toJSON(mtcars)))
[1] TRUE

This vignette introduces basic concepts to get started with jsonlite. For a more detailed outline and motivation of the mapping, see: arXiv:1403.2805.

Simplification

Simplification is the process where JSON arrays automatically get converted from a list into a more specific R class. The fromJSON function has 3 arguments which control the simplification process: simplifyVector, simplifyDataFrame and simplifyMatrix. Each one is enabled by default.

JSON structure Example JSON data Simplifies to R class Argument in fromJSON
Array of primitives ["Amsterdam", "Rotterdam", "Utrecht", "Den Haag"] Atomic Vector simplifyVector
Array of objects [{"name":"Erik", "age":43}, {"name":"Anna", "age":32}] Data Frame simplifyDataFrame
Array of arrays [ [1, 2, 3], [4, 5, 6] ] Matrix simplifyMatrix

Atomic Vectors

When simplifyVector is enabled, JSON arrays containing primitives (strings, numbers, booleans or null) simplify into an atomic vector:

# A JSON array of primitives
json <- '["Mario", "Peach", null, "Bowser"]'

# Simplifies into an atomic vector
fromJSON(json)
[1] "Mario"  "Peach"  NA       "Bowser"

Without simplification, any JSON array turns into a list:

# No simplification:
fromJSON(json, simplifyVector = FALSE)
[[1]]
[1] "Mario"

[[2]]
[1] "Peach"

[[3]]
NULL

[[4]]
[1] "Bowser"

Data Frames

When simplifyDataFrame is enabled, JSON arrays containing objects (key-value pairs) simplify into a data frame:

json <-
'[
  {"Name" : "Mario", "Age" : 32, "Occupation" : "Plumber"}, 
  {"Name" : "Peach", "Age" : 21, "Occupation" : "Princess"},
  {},
  {"Name" : "Bowser", "Occupation" : "Koopa"}
]'
mydf <- fromJSON(json)
mydf
    Name Age Occupation
1  Mario  32    Plumber
2  Peach  21   Princess
3   <NA>  NA       <NA>
4 Bowser  NA      Koopa

The data frame gets converted back into the original JSON structure by toJSON (whitespace and line breaks are ignorable in JSON).

mydf$Ranking <- c(3, 1, 2, 4)
toJSON(mydf, pretty=TRUE)
[
  {
    "Name": "Mario",
    "Age": 32,
    "Occupation": "Plumber",
    "Ranking": 3
  },
  {
    "Name": "Peach",
    "Age": 21,
    "Occupation": "Princess",
    "Ranking": 1
  },
  {
    "Ranking": 2
  },
  {
    "Name": "Bowser",
    "Occupation": "Koopa",
    "Ranking": 4
  }
] 

Hence you can go back and forth between dataframes and JSON, without any manual data restructuring.

Matrices and Arrays

When simplifyMatrix is enabled, JSON arrays containing equal-length sub-arrays simplify into a matrix (or higher order R array):

json <- '[
  [1, 2, 3, 4],
  [5, 6, 7, 8],
  [9, 10, 11, 12]
]'
mymatrix <- fromJSON(json)
mymatrix
     [,1] [,2] [,3] [,4]
[1,]    1    2    3    4
[2,]    5    6    7    8
[3,]    9   10   11   12

Again, we can use toJSON to convert the matrix or array back into the original JSON structure:

toJSON(mymatrix, pretty = TRUE)
[
  [1, 2, 3, 4],
  [5, 6, 7, 8],
  [9, 10, 11, 12]
] 

The simplification works for arrays of arbitrary dimensionality, as long as the dimensions match (R does not support ragged arrays).

json <- '[
   [[1, 2], 
    [3, 4]],
   [[5, 6], 
    [7, 8]],
   [[9, 10],
    [11, 12]]
]'
myarray <- fromJSON(json)
myarray[1, , ]
     [,1] [,2]
[1,]    1    2
[2,]    3    4
myarray[ , ,1]
     [,1] [,2]
[1,]    1    3
[2,]    5    7
[3,]    9   11

This is all there is to it! For a more detailed outline and motivation of the mapping, see: arXiv:1403.2805.

jsonlite/inst/doc/json-mapping.pdf0000644000176200001440000062644612626264003016743 0ustar liggesusers%PDF-1.5 % 1 0 obj << /Type /ObjStm /Length 4757 /Filter /FlateDecode /N 72 /First 609 >> stream x\kS9 }ۙw*ݺKUBHB24~#u}4}ѹd)&c)1ìP̲{XI2qW,eK& 3@3DRJ!E.e2АV2%A)tVYAŃC P5ǫnz/B0k 2F]zY El8f#4 Fe 'U[f8=M3l(4lnjg"`XC$ gpDkI  CS`- c%s23d/rFR+`s AV߿ҝ|:B8;bA>f=Ev6)s6~8]L>OY|ρo8dA~d`>;4u;O/$No_v)9\H Ւe=n0bWhj2/M4xr> ACh'~_7S{y^^dp3c/Nы/_,pǷgYhd0Tv4#O_@ɟ0a ("m|M@=r\Ţ,wEy!=޽, Y%W/,.և7ofdYdȂ ?bPJy4ϨwQWE*O5#*6c:rDG,:bXtĢ#KڞG|R R=#lp`gUKe>ʧI}2@6_~gOQ6$O7ql]bsHЧ'x^  pHH+ )u|Qp*avg`d6YB)CmmDpü:~f Hel4<7%9HllIj|>xt9>?[s: _rn{77A?wSP5]pCQ#_+Ap9vmQM~ɿMW5,tdd :*=06A94:_׽U$"m"7W) \zzhih<(_Wu:'Q < `+u Kn68堈cIzz=Y(dFNljTF+׃ne@(r 6!l6lؤ  DY@$"Z>-l ?I:RfJA6:Rd_AOf~ ܄"rKߊAdMGlYJn?lUS7B}")D;3:,sxKtD">]-hog VhژLt7e-M.DcdYsH 6\'Dm4ܴ!|*Ј踵-V,: ծ.F$/뫖]p{6')%]Tl>h9w.iC¡0[D.An:Vъ.S`uU/p2Uy @"%ZH ZA!Ivҭl+׉uJ%xLIJd'UBvvȠ4fT|75J H 9gH|'Zq-BFU¹bT#QO;_FOa-|<}#([bF,0mŇd:{~՛ v8;"GxW0gW4; ÜfYl+%ډp&rhJbyK~4Ȧ:+Q!^:E)u#hi1m>$ڤ{7˫̠_ҭt;}҃M6=L$gi?폇Qzezҿ/0&tBK$w98m &Z99-uTYo8o.aV`0BAXp|9xhBzZt>m^Ac''p멛muS-ukPz-SOo"[_KFEaBBa^59J:De"@X,m(j*AC.M-W}:ArzH%x[%vo]@n’Ua":gE=qDNG}谂ô]tpҕ<%,T3bS.-RגZq+ѩ]yw҇\@g5"[~@OwyďZ$FnYYOc\HKvhqdx]b8iܖJR#MSu1+s9̷>A Vy94l%)h5C ָiؕQE4ƿЪQѬ[|?DJ6;{$ cZe)%IѼ*XB5IS\7n;#$?u/?RTҗ*0ŧ Hnӯ]z~KSPZOe+HmmM%UcK? 
x]/בv:Uo Va[]NW;SL 5jʔl̗zdLn)PҌ})2#:Z[WG2֑V YWG4c2\[Jmd|un w*KctܒxVqx8> 2KD&#҆`}Sui^>Ȏw}ckg[ڲThW!/Q"JXY*aM1 .^:jR+|h/"Fޡva+.K8h~HHAJOrVQJߵ ͿmӵibGR]m_ dIaWnQ-G9.4zv5tv7^D G?;ʦ"n0ᙎq\"mn%q~P;YZhHW(y G"\%PQi8#˚>> ^zHc-'Їf|jKTp9xtx$?h ꭂ4*ִ>7DFRmyynX Czv;,诵odzl˗X#G27Hq.%Y ܵHśװ=kOT1b~yS׌O=AZhϵ1J.0Axa_]pz^T.J΀h> mm dk!LAxL0_QP٭@4'v j'5 uઁc\OuVnۼ{Lˇj{װiotps6`)Z=~gxͨ>{(FpG\Y||:]8\7ip ac#gqǓ|ߐI(e#d͙wZkw>^?Kc .=~vAвlb9,P4hz&]BsNR}2tOAuIR x:'~"X k CfC§WJ Dq$DIDFG{ox@p^+!}ʶZGRkr}b~/p𸘣mslڕbªYP,C֮TiYf^U 3dp?Z} FDt8$8Ud8GwD]Ûi]`0 9RC'<8҄A,'L4dChAvqLAd4;,*tGS*徾"3WL ..rPG` WDwA߅aאaڽpAhx2yL\-E9u)Z%Fu"/A"endstream endobj 74 0 obj << /Subtype /XML /Type /Metadata /Length 1567 >> stream dvips + GPL Ghostscript GIT PRERELEASE 9.08 () 2014-03-12T21:00:25-04:00 2014-03-12T21:00:25-04:00 LaTeX with hyperref package ()()() endstream endobj 75 0 obj << /Type /ObjStm /Length 2397 /Filter /FlateDecode /N 72 /First 611 >> stream xZr}WtվScTMɲqe?C 6 0hI93)"kRɃ grtFSBƐdRbJeJiBSF≓J-ERpQdUB&XF!L!0cȕ0i̶ᆬ JAVJCa:k /!.S9Y$!aKqs)Le n5 I$ %7`~!!PI7V0'zK p f7V$ {pXq @T ZHN ɩc z2OI ˸%.L Xƛ!`,B0 MG C:x*R0n#"#x'@، IЂIx/(K7e "˫KhD >"7J8neM7"ᓻj0 t+CZt\p=sͥs`׈7JlpUkzgي+c )؏Ӹ&1tO ȏyh3-ݢ[EQp:]q:Cd_Re^Ҹ<C_Y(rJ3W7ؐi[czö^=J7cjƴoNwiR|4+@q9[ *-򭞉g6̣Jkφ6 ]n5cm0v(#_#,-ɜ>mۢ%htn:$Md4_ J y܆W=-Mv6mrVmU"9d6Zk(jWm=jEO3ʋ w{+Wj⧍,) hnj.}KکkaK6ܯwvAsB]m*/z* Fޢywnj__m#~ٶ.|-m"o%!˯w-k-bܵKkEioeu;L WLz f@nzъ#~'I#٭}͠z:Y::y;p_w.b䧒%ECb +7ٯz9]#XC4:@G<]qN.s|>C׵6;x{A"yTeS?.ڿh WxrwMuy^XmLyx{q} 0]AeXS@j|/>`*$A@FA߹[%UMI<<~ W_CsJ52$DGILdѵ tyjc!/~l=aI7"RaF?`Kn*Bf d\,=ga-zu)8$;.)LHq}y˺V=Y: ZN):ziabe4 ůn"[yS/e^E>:aDWF)0`jƽJNjx=PnkT?VO ,*4ׯ٬vOp(!IdHxo05>dh3W]uwQtحdXXWYWX-+){e}iY >EEU~G:Wi"G:QG 4H<2Og>we4.PIL~i^93#E 6_d,<;ȑsU #Ce#*,wk#1*T@Ο7{ ɿ{lvendstream endobj 148 0 obj << /Type /ObjStm /Length 1950 /Filter /FlateDecode /N 72 /First 641 >> stream xY[SH~_я;55jjIv,lTQ@c1H`|edĀ-ӭsO}NK` f]2yq,haR$4 O = L:no8Bax'AxzRz4SFEz)눰Ly@8TȴTiEB"xtRkAzôSȂ 0LK=1r9%ВA`2 M̂㘱d b!0+ Y-2ԶV>QP4*f]z+A6hq AĀ1H̑9͜s%$sNZ:MVyAa4U‚ă%3&PR0" \yDIGX~ݼ.iF'~;}:+LcE>c/z<+jY#s&>>mn 
Ə/[.Mp\=#Er_t ]5<}ΗQ&C07pwY6Y2IBQb%OX.2Ƨ$aN_KvqxWEy~@&9*ww ?C~G'<|դ2g3n ^O|/W_:3^7͗_ Ŭq8I>ߟ5`nuolPCt@ -,VuO|RuK,f$DS' iE[b*A(,ƸiѰr;)jcFip`|FV[[ĸuM08 SpVA؇.b7M X$P:#Q~(}cW_'ڎwFo_ ^omZdl^{yЬ;[w>/38@_ khx_h6<-fuyl&h'y}kam a%A _ePCeCZ,aZXƴOzc>QԷAN @tZ5{~,)$.yN(2|$K>c&I~?-L8ښ,tͰ8+K;=r9K@?{x]>^}Ӕggy/G@t  #gyVGBh,^#@FG 8*:Cy[?>EC5aؒH  f(}uO_}>E[;8w3΃׽j?wyTyN9mw(k\7iw9\ӢmvbiY-XR-e+n%ooZ9y0XM~9) VĴQTQ["}mC.,Y z{~}+ljenoekΦS}6y>:n 7)Q8T{g&=W_K~qr>Burԥ$ĴqnHTHRi>}[HY1WQE

> stream x][sǍ~צ7-sl9K"ӵ@x#I9gz$۵4}㙚'|^DMœhz|V\<^q̨s.^>]);k}vϟ._L/_/'R8aC{dwثg3)YvJ;bn&1!U:]|yٚ5.zJT.}v;{˗G~wnD JJ7i%@yjoͻM+'yNGs S|^\v=9M+ۨx.>:bz !˻$vgnfU uYEi_iC)+UjA>ʄq(s5ٜNmX֥WFˮx(U*8\s'svn@1DzVgM~?Ecx*|& )Q{K/'p+'sH(wq13|L:[И8QtVmʴǫڥ╩4 {5B$Oq4g|oAN>A)spZՕj}'a\2M:¿Q-ѹÈ`tȐfN?* ]!hM6g4xGNf c4&~"Y_쭜+Bס!\Yj_{#AuMm`}r"Ob&8Vꏾ ɀ%D[OЀ) =gOrOA 0ԧEҀZ҃kPֶ]Tn *l51yMrrz>_2:?IG||@WNg`>!ܮ烋Y'&7 դ{_,tj5hq`9 HVM><}r8acYA,Y 1& | R|  e[.=^,MN9yxLh{1TbK~8Dk- =楡WfsWzG~ ɺx↮wv($`M K ӈYt%%u4M@k1ٶhPhkCQy ZSZI*dQS$9 Q:^)C8*Nw7~6 +L09G{&284w"#V4u@g0TP0 E7&*7vnRy:fӛ8)+7{ЕW__#Ca GH[Gİܙ $ _.9›o7Hj(ZZxfH U>!/C~A X'W_Q -L6AbXo%Γ7Iw&jCRyuCtM $5ע9鵏] a.$z0nb1w\?6H}V&Ѕ9zHn8"S,%NMSrCkg!ɔAE}z]xjvː(;N E܎no7Pb:N.dvHpYia$$a4a Aef'pa'N7 G -1[`QM3jm:fƯp-ȷ9˹Dy ) 2#sn{$HoG8NdO/h/y7af躇O#䰶w@܅C*\ /;{ހCq;NqfZqKߚɰ~ = ~=WG@-4洇$rZ(N~(/ظi >CAxۗ";׶re 6al+g *x@Eh hqe=':n[yX{n&,};"l`lpᕛJ9}]WTR\Oxk\:2q'uLp_zp7Jȭ?2i&8oל,`^=ݥZt8 Jy|Ww xToiB466 *.< )Dep6Rƅ FE@u*9~a{CU Fd]w$8#w/.sa|s}P6Vvnˍ=D m"ԪϪɦ)5n.|sz r.È672f)o@4~q@rj(yhp" *³TH7(KZd;6TdL~ R}D}Fln)&NyzQ{é#J:e7wC!ЯzK mr5,y{xs,%*9 Hm~M|-P鼓_w\a[mLAI7*MXScHX:1,x#c‚ˎ0܆d7l@ߊ\*FE*=Fp*pkɉul \9*q|/5rˠIIeڢ@8L ub{zJUfNìdXT#ޜỈ6NZ"ux"w覥SG 3`#$; aF]7AͤŻ|jgLpr Ϟ\/9LƎ'd EA_<.h{] I:0BX0Ud cͨ,`YE/|<*: 3 vRSJ 6J'D'Y+x2%1~ .ͯxD F(4̗J~q6[ #G eTtk4rdaϘ_gk$m1+%ׯRh!<%S&…V,:3E.'pU-,fW0'qsbW|ft`+86`ɽT@\ ːke]ᆮ{|pO %y[(fNGuPs1U !shc?S?._sjD'/?g|nKιݑxxb|)^8nzE0B/J탂tZ0S-{-Зx5kaYM}ŶV̍)Iq*(-%ۢ5/|6$EI;1 --l~hB˨I-;yfw!Em .G[Է"\e60g9>_&+caF+}5-xAhGN*>'Wo,?K$S1ঋIV X_b,nˬzȕeGE3m.ێh_oL &[X5XFAq?O!P!EFfuɳEm2 ~hd[ |Ff?(jx Z~1(3,?sNr#:}(_I%F)vhX_a{:Aq b*|J&\} l 0![x]t:&n p&?"NGnG-<~) -@ :?>,(Vrԙc(Fㄵ_H1UOu$;Ӷiʮ# Ÿ{SR'a[Xm$1H M WʙfʊDfI"6Jʠ-}UB^ %3:GÛ& 4)uOV2rj?o5yF'myjgvx]Y2a%%*0:s7K&j~kl?_fp=᯶hC Cheڪjʞۼ( :#n~\K9`#Q(p)ԀuߔV%3jf)1vd_uAR셯Ռb#_ط} _-ǻylߟ7 Wtd ,jv tEarwW KrZo)A"=/Kl9cf"eB8*HXj+Bp8]1uB*b`YyUx_#vx_Ciϑp"U- !`gzsd\wG/0gޗFCUV%RThTceG0;,6ʃ L 7+3K<cYM&d,BY1I()ȃ`&7 KOZybMa ؽfC$ߌ9( +:7ֈ1w$(&d  BfD- 7^CwGoQ٬'Ctj)=e3_UE87kC~TbLL+2mibxB1%ck-y"N~xC~\,J]Siꍋ;<ßzZB/XIut)СqUS$)"9z R_J,6ZűnM% *1beҶGPSg) 
?~&}Fˡg,u[;򯫈|CyQKFE* DL(8l?\ W J/.’mQ^f\Wo_![ -|ONDt6׷BAt)4#NBl>/ND v+ͻVdcЙ !.QHNaQYQ!Y$"$WWC1tKYCqOԔf'*f\/'k]9INټ"fߞd,e 〕 ruo2. ї5.'M %P yYv1G;rendstream endobj 222 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3326 >> stream xWyTSw!{]JkԹ7NUGEG[ѺCX/&NHX t\jeenޏ9o~tv?{,ϕPND"yN=8I48 R4ͿDߢIL\2V 'MO3gsGÃ1r&,$Z!?kA!|¼0 ]TΟ8E ąB動1JEtpOoetl&D-Qc(?Y],OT Y:47|m_dEZEORkqZʏZGM6P )ojN}ByP)OjZFMS3LSʇzQΔ^$IpZtQ)-t!":Ibn Z6,beß!2dQXvigWI:9etH.'>uMZ[ u-ƬP EFMtEx)In0vA#ءcpzBJ3+,rO`E4][$ϻQpTNy+vps6DE*F:$7gYFVWȏ~xҟ.?[rkt.AЃk.VE_*m|Az[,T)F~f?u7ʢBhj(ٳ> V.?g&G>w EܖigX$ΣREhX%(7Iy6nݚaqhC.}-{;]2`ʭI!8.wRVMu"MBii6 :3Y8[zEiyȪ0=BkީpwA8".@i5܎TNˍlf Y3EBC6.&Z[Z"nnj  d B"ZP`2 M\AQE]AqU~q`u+n 3+ J?)3#!$Sy|?rv4JnYh8Er;#L?6lL&: 3bN5ƝЮnUU=\d廢.龋F UgT\`ՕnU%YzfW) f0B#Φv m_;Ez5(`+(:1-(#Z|I5zVj5VȌD| `{ Q_5d3YSq8Ԯ7 !Nk ##TWN@rƑjpH%&uO#˚!h~{L;fp+2#,(Ruk OR^Tدhþp`,}AYm9O/\iv#0wt6ڀ.?WfBW1Ȟ{]={Hf-H mc9Sk.7l"$ H}ԴGGG ,X!q =%5xm'#4l3)ӊj._JG+R}&)!LYYfnyɱcM$Fz%B^V */`V?E4>ZϩI~5^y[(]!Fd.QBa\"Z!s..R>2?ݘ]؇_1Jw{G6`0.14k }.qvGviQu-Rtߵ"5";wݶ fFoӫ3 yYjãsyESj?Ҭ @Z*URpLgJQd`ގhs6"95LCOp+Or z f?̋uŲ1>o_=9+KЧrK?# mSp1Tf4p0`grˠdc)K37,R{x,}hzy-d(DSNN؉!i]1u)ڄh <~qStl:$ujnh 'E 9YٞKK_BhkJ<+j-EOJ-em;mogo3[{>/|M\!#2ێȠ$ZIcl}|tBC>K N:D CeJ jl5t@TUyܻJL$7;~qwOceC,&X0 >-Hd?א7/B;cAk*GGcpy4.sgG럐C4>{5) K`) ?ؙ[̵ە`s1Q̫xfNaUvi ~ c&k$ތ~sKXhm$&ޚT[crx(^Cxk :|PxQF4g&pjH;]*. 
\5۸9^M y̛ {2;RsOYk gǟF,Nh?ZrΐFewv$_=xٕԚf緕n.uz1}/eu \Yc؍-"D\yHʰg0  i&3~:+Pϙ fR*5;w:/uˉ4 :vAprc{ɶc y7 wҪP 4Lo K)UvCGڅ*ٺRjԚ_hh sT)^$n_ϔ\ Ɯ EL(B4MX[l řILL8ѹ!b և}s{87Y1R> stream xmTkPW%623nG`1Ep|uҡEh} JذِӠN5vhLE0Jjgqf!n!޸&= D" #+ 1G$l&_d\} + ?pHADC"ċ2) 0O7|`e;jِ NE'LP(lGa"i?ω%/ׯTDE._(A30 uCD| aht7?tʀV[Q*Q&EKs̙K 6eXƋ,Mq~l[Ho5Vr2gy=t:جAy`9⨅Cy;HH^ڰO_]أ՛+Jkc诠i|!E"(+CdEMQT#|=;4ox/0^;w4oK[Sz)RHĚ-2FNxqsd>4p}e[6jiWa`Vn|pهmX$G2/ 1ǭtϏc5[ƐРx>"PUEٍPHolU'Rms2POUP_1hHD/ɤ(q4Y> 8p4 1gnΰ*Ol ڋE7=́;ދ\Q1dy|BmA"NxksLvi]RSG9 pc#,UeEkY*ePD;}.KV+7Y&7WYkO^ALM('2PuF}Pq]/d?Nؗ˗G>ZS/=.7hっ&/PG[Pq(s<ԘU'].z^I:]^HUijRk+O/_K:V* LD(32ż*P> stream xU PTUֳcLd1VyL$^^- Op7^M­.)IK?sEVG&dRh=͐٫v;C ] `/:Ga"/+XF6IгM>yI BM+ 2Or^j#{] \Fczi pŻ'"0 9cҳ-$f6*VEػ.Gg+Kp.]V1\ Y?`[+#e4JNbhL⑟p{x2x488L I$9c]p#p:3ƶmPK4l޲C {C碻K)*_%96kNiQԥeGT0$\28=*c+sI Nn).է{ ӝ= ,Un(Jֻ)j[ ZI`t2=!8|ׄ -7X"#?Z|#Ƚ&P5]+s'yiU颹iZ[gq\4.~;9q6ΒXa&cя,ȿN࣭{Em=-(U(b[uOXˆ PX^R\P+P ܓ 喉mFJ#pm g 혽.U@IPc\I?AFISx"14Ua?cKzߤcUzM2_+HXro3i?bMV! .oc;+^|DV.Z [(׼s993.'O_A_iN|~W雸V##D'/S#D¡hB+M͵'ay3jl6sq^ /FkS2>a9~ZpMێQ]_6#2>Upn9G_.(+Y2p-iDݍ[ |VlK}ucمdf4Q]V\%"uj3qg'+I|~Uw.ߪ$C"pJFe:2RR =yB<f p)-DgRS] <_(Ӟ^Ic $ADCqG.é[Gɨ (~78eRHt/JBl_'8Hq̙B͔ z\XkE#rI2\_` ɐf5hn;(zYyY*2~JE%'Dɓ=Shte`2*<}pGd-*5bRo Iᛯ-MY0ysmHܛG.fVܦ! *Nm\(굵J|Q(W6+w4zS{~W]v1+"\~>&O3f؁*A]+8]kY>,b>$P'<٪G ;i}ܗ5ڭA^/ͪC|ZڇT=/ =;9˼{jP.endstream endobj 225 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1018 >> stream x={LSw{]*cCǞ$[>2&RKֶPcPzJ ב2P!!S 1lb[۹s:,'99'9{TxEӴX_o'l„E\RH6~xs#)M,Fݬ+) Kii{v/ʵfFm3Rmj|QZ|BFjJOIlrK\ݼMg-峵R[21u_rSUk3Z(fbUPT CRQ)%ubCkQT5u.%-dkؠpG &w Ԃ=0'EU\2_|3c;A:0^QQxUHb##?`&']Őǂ^2AΊeBmH4-"4A>:PÞ] &WpndoCD>x|fa~]uF&|\VH!d/%[}MC+)n= $*U8?}lV#ݿ^3(v0vp[σ⫀/Oe2 . ,S N`= Pµ D nEP11H1G%v'}b5?~so279Pb7E?'?/˸@H9Hr6y9prpx?euu[4Q(cH:("Tt̀ ş@G=!~Hnc M*{VC71#rrO~cfeA⳸}}]]-G |,FcM6he })A2Ea4ĭ"/mol,`I `#q[FwVs4@{j2<8CC1=Dd&c`H 𲲳]BVuvɉv&% {|RxT+ҧ> stream xX TSWڽ1{E E#t: U*jki}*~! $` \ DjmuG[]jz.=̬:Vr=>#(@*(d_ ? 
C#OAgQ ~ 1@y\jBTDj4b QaBҘPC,*R.T(=LrrИY򄈅f'G)"҄=] CcrcU]҄X,%,NT,Mړ35lU.ixDdPhY̆9gnܺDiqQ߹/qox{^A=~{w]k<JC[8~I9$g`եt=onEB#P;=1iT`2z달c~1/]Ʒi\U42xZbOœB l`,.,f\kOnE1\C/oEngMMO>)"Q M4om0s^ ]!IkpTU7tn7$Ö&`Kv> :сI wVL8^Ȩ>d}v?B3D% /Qg=y.ػs:q#K!6mi.9]TXwyZbsjuA۽&ů_  D, ZE2 FM*빆f{Ǔ}fJtr^IƄHjdND,[~:PbChjÒwCH2kUj-m?"?xO=lK'9{ FcZ#MġyB旁 Lj6kufQ*5}V'] Tcثo N!\{oe3? B w2Vz[h7M{ 3!Nnh}xwt"iAYJ(A/V,̊T'=JiQIp d@ung)Y2f]ȃT<j!`$b} e^b6ݱaL BHwʔU_ ,B{z 5Ҍ-SukT!7":ȨH$˥[5Q rr0k#&n6J@i41I5A7Hd9?8y"{uEj*GWDM̀-ӡp՝F_ZЬpNX Sd ۹k_ !oz:`TtڗVOczc?,bӁVā(y%=>;$Ԛ\52ʢ⚪L.,6D)UF% 66GgHZy$qʨGpʼ]~*rKas,M]á>;Ơ_WݹōICih0X - <+$`o8.Kw˘I%HҌq"yh4CJε'z3XIW$ꕲD^Y[U}ߕ,py jLjZDfZɿ.4XI(yPF&FۜX#S$%pb'޸Mơ<%$"e( M'}8= 2ƋwDhN ?=n=lNAR]Xlp@%-cpP^vw[@ ӿʌ7 v}]3ngʓfC_&[6Tv{6 x?0+$)c~ݰGtd906oس񹬻%O?' < ~6Td|PSDb!M$%^jغuY%iHZO .s 8 $q~R) 5e򭰗bsz7Fs! `2z_[~dhp \fuu1@G,o6T-}Zc &1}Z (3h5z =QXh0)Re?[Nx4Z8v?bTX$egMC,|Fl@k_-GQ4Ey7IO[$gԌƐ[lz [ Af; Mב/$3ݽ_w ?.U/l&>QSؿk_w~oǜSk^W^F-PWΒ[@I1js^Q])qqqUqvM/vMYh1)S|_nޯF]ϋcuZfqrsXj`&QkHEk[Ovb9q ݒRːpCۇb_ CƒVȷkhpQlˏﱇU3";v{ʁN]zXI!cL#j *9]%$(\+;4~a}jQvDžr(}Aendstream endobj 227 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1666 >> stream xTP厽!vN2(@Sc1h0 6Q@8Oq{rpT@ȁF@%Z*S"\QL4MG3;;~C$I*^cGKIyq#l a*S/L|U,D(%PV[ ,/Ѯ^L:**Zgf.C02%n&PXo˺+v< ޚdn 6W7 ڄ" V5XAy n؛`H4m#D"A$)D$GXIl&-ψH *w&Ԅ@\%! BTPgj5uWΗ J 8|foԢlw5dr[]zCG\:>vΖb[Ĭd'ɢzG%4,PRτr+CGp>zKb=b?l#wtW%q3$tRRɱGb oB8ŧtN~]|x5#ͷZ} ع(,j=-h}՟{X-dޥv?4nm~Z{-}DZP!Qa˳w/j}W)@v$`{ .Ci"Vҋ&ca7>#}P8M1vE^߿} [g/iJ%T"@JH Fy Q-4Z#f:^y M wS>sNMMΔl3 ='ʔ[_hyAoi8/bAB?ǁ6"^'>5L7GQJ&ߟ@a6;ҪHNnX&mI'ד~E')!{oDܳ=6S_SoaS=FW82 ol_TzC1 _;ʺsvfUk8PVu׮E kw YvI~g_(eͷRgY4b0gpvՕ]ZымyFLpZ'w|j.͉?VdADM>*9xWodUPtv 8<12DȡMOzNX\)3O2VFk)Fx.þ#ž'bjendstream endobj 228 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1434 >> stream xmTYPW`E[kOZmq)qVǠ$@$! 
!!9, K }mnS]isL#8ӗܹo9; "I0 `Wn|&>KE;A BOoYhHW B,Q&W_U i(Ke"UH RJ_DV#D2e\i Zb q<]B$'EL,#K%VwɓĊt JIBHhB@"6[̀B"(Peט a]C~zXwQ]]Z6xqXS4Aahŕ7& :I1;n+/БTzG/3YqKʇЬV f%jw}H݉R% j8G' s e^;tY[eL;QiߒV0j\h8tu.)r ͻ¢lq4:|uÉ<#>"f65b62]32#9ԈlQ9E{3jhVf=RV\d}흇m]*ێ^ B"vk FK\Peu:gN>Kkiz誠3{3YqG< U9uPR-}kKz+K^3d:;@Be )7 8+1yxNE?__E]}we(%u`ɵY-v@ ٦Z(> cs!|! UF`+2MAh5g˸b(cֺN`-\7Fj\X,ANhDh1W;f*/9#==Uݞ [UR:|g˰064dݡoh"?4eꛕX-sOSW&*BAߜ~«l,s<'[q*WQ-^iTelQ<ːS]T Bd N؋zs5KOIPDT%Ϲ`{7d`ZϭﳮuIf8K=rXܿ- /[ ?tV<{X>l^8vq;Ci7X4m Df\1|~ Wig}*\:74\gZ}?ݿzLj:qx(jK]􃡞> k\fNd5'R69=x|& wĿO%endstream endobj 229 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 4314 >> stream xX TTǶmC **h <W11FQQA@dEh 2+2Њ( 9}4}"DbՀ1%Y뭷 4Uu>ƨ#HXgS Kw#@6r33)xbB\?J$QJН6㶎qtr9fͼ`!6ޑ~ޑC{@XqDFmo3;8bJh'Fج q Ygӻ)?CwFE۸0 3e^P ]""DxYwmnw =iǩӦ3Hf%cǸ1Uhf5Όe0kuƃd3gf2YLa2 ,bf,gf0 f8cX0Kf034y*Y%7_IR/iQ jYkVs8WL~GO4?hEI. \4pϠae t2n`1ݢ1A/.-xej*UK4ݯ{@Q*dA T×PNFgVنCLv8OV TNbCqi2&{e]ƠMu7D*vGkȜdcy6. B{ f} d񲈹֢$=:Kp4w'[ɢGOuqT{=-.*}Q6gB_gb"'w<~^,fل[) 0Ͱͤb58q$3ۑ!d3>CKh* 44O~TOagQh?x7 Gx-A }V4K%|~fq<&d MOƣ ]E 4-J%&> <*@3abyW#(Cip1p 2*hNb8T y<ғQ-S7.~Ա"o>u.  
Z *h3ro;{s7aZy-k!*o]-yYzU 'k!cZIHP2Xc,k) 4 F Ax 8L1Y 3s.gU*=O`SjP&ʉ #<5>EҠXa=aX\mKZև G[Q]W4糋2/VQŃ?]#olY+%GFO]c}OT;?>3)#%/RAT%%H" /'wʎTu>4:ɡ(Gr.թOѡO5NIR| ũs[wb:ӻ>DY;_VB pכ[Y2LBlR6P,r, ʗkTpDr<3vi@ebv(?XYwB MB{sTRQO?͍ߜ8dсz?6aR9 gZ-Rn#}sWl hsdBVF VooBt ׷k){e-*)*9TںT͍p۸k|̹f@/ #Ƀ6&ErJ 3L鲇u1p6TZ AHI{dNPKY3s.XJWkZ >==En~xAJ,vNM`yBHؗ@#( x{>UJQL'T* _ r[Ikp(nWjZUeZ6k-qYH/KǢcrقs}y"4,Ry%QEHrMQjuXZ*F \BV|?"YCF"2B2(_ߗXE?kYi )Uk_Q:^But[SOhHh4ycIf6dK =1ӈ {6ϷN=Xty+h"K$ݏyT@W?)o]hϪ&8YK"E} >1_ X1XWBV\BFs8X[)fPjs(KB}aI[ ZLIӨ!AҀ5AKz"=1ȂZM-rf6f&NĉuAuz|pyC&DG8a_>bW~ r2!#%}FX1Uņ'&PյH$6+?q$eUyPiD^cu1dļ1nTsTU]t/pqODR%Ѕu&)hѺU/HE-0=ZqTye0]oܤ:i 9 &1IұֻY%N@-w$_c?t54x^k3&zIQ/>jok W w#'O9\stEэBsumn8,0LJPǨT)頄4nwvT0 ёOɪԙ*KHGJc'2"Q5gTu³>_6'1'7  [.n)+M٬%Sf&GsPRaf1`{endstream endobj 230 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 6635 >> stream xYxWC8 SB $ 44wl˽WYQ/n L)lHv-qʕ }{>K [vêys}N&pp?'b+Q|5}Ԓܰ14*gf%J ;3tޢE gΟ;wQе99䏔Јؤ%d,~9ѩsҳv1sVh~RNb_儾:9/S3rsBצgxEKӶ,cUyļS(]"77n3f9/}Ǜ -MME666m{777"yyx/V^oo-]+|DK xOF$⼡rH(\މM&L1g!?KY]G\}*tvhBN-ޮ>GP0(MNҲr'mf@j:,bwvs#46xq`~vY Hw~c7쁽NŖrP@fYDyzX b(UR* V q}R77v|T n8lM[ec@2[,e$IZ9Q;[w׉f PJ@W3lr4S=h(>޺h[XVKj]SZ+ %sefZ1P͐&-h%NWV$ԗX무?O orB;znENPKٕF4Hc/**-]od6dB+-3!omjZ_eNƩ4(xƑ%.!!w@i,|;p&D1(t+xmI۟JwHYjT׀L0X#"7Xj ,pNk'۟{܍g Ft}F> h|QP,%DzAӔ2x?')TC5>)hgo^fgp()ģ)`Z\ChϿ:a8C ʯ9 ~e< <}څHH~\/K|t;N3]GuugxT4qaˎ@ywz=/ׅ2m2rQWUG`1eNC4ڌPQ *%Ǭy/AS7y~w˙j1hJFDM4*.2KP)9Ҭ2-[:H5ˠt Xkً[bNg$f5X֨]IiLYOP$*(V3}9B[X/#0eYQ+NvfdfloGK>*Hj!h@I(M],dq=V돂7wi8lsˣ_jcP.&WEFT&T>ڪp>x mU\?hX@ Dc|A_g)5DeE\6P(~8`8|o<s2npO'-'+'GbUZtcQL$<};6aw#\mz,܋i}? 
$ݹKJ=sDV[/PUvQk`l7mG~ j :֦S !K֯@Bؤ1 `Ԝ_ 7Vxw׿rќ~$&^*yPJIS^ٴ~UqWOikI'A& I^m ='g98+8EX!V<(&w8="{F!zv4eRN׹"fO_έ]y{wWPcU'V&tpqx"xL'{k14 u @]~܃`!֯isN֩H탘>sZBPw-"`jJ:9ƤG/3S/WC)KpMf"j:b!k8+;tCq>Y,ׁnuk=hbo.%Ux6vŖY<ԣB аPe>:'T ʇmL3ez DR2Vzzb ^ʏP4=}|R8 ]!cmCU} ﰇY8}q( J ӾG 'dAKHkr&-k x~\>\!:u받V/uiUgxhHvY",1@5*vũ"6$W_['f-BiH`Ҕa N$0x+*so:|n%0RE -cQ]k}~߫ B5E@m={KUPALЇ^?qAS>^gi!7p>$;s XUяYgK#vD PnZv4z|T^+2fA U`$RPD\k+P*Ujqrým[e|PzJ=AV>w':,)dD=Ơ{g,Lx#}uP6hצ;iPZFG<֣VpXX4f9c!ɘ˘zhI-\)5!8QUYbuAf= =2nHSA&CxྐྵUXk\2URfHG T,lƣu7;X LS.j/u;z--<^jCFd"Ys Ќ/|o.D[+njs0!ffD \KA>AYs^𲵲UH$ټ dP`V+r:w%M@|Uw3@q(@cĿwVIWS%Cx4skKfd" H6V@MT}7&ԝ+ヿE_-,T'bk'p]*T2 SJb |{u4 V{1KY"MC45j"x#+{)/.JZ!~u^6n8#LC:g_`ig |בQ'K\5N#m@㬭2^PQqµYۉ)$ϩnSnt/}Ou`%`J3gF&5-2(T|L|*C'oشDHlKs."zFV'z?)avw7;uN4MY"I.#Qisn6J2ӳumăzs[ж+hwߎzTN'T4 ?67`,ZJs4dM[S,u.j_d7 k sʉV|'܌uJ8RYcv7w8*6^,9 j>ƶ){2;,OqVk9Ů6$Uu@'{uLRKKU~sÜx%t}DoJHsJ M'_d-A#ZBMq;}'tq]5]$l͵eeggKM&pWW玑v؇ak3@ P4%ݠ,I=Q^ Z#ݎV&0J X'Ua8^Bai9M&0ieGV)hRO'e/N ̜SpNkO9;eP”!ŠtDZZ˵Trb[[٥D8eGw9iwwe?3-+wkof*ʊw .Wd߾ ?X>ZNnn.ܱ)*:6^8g4{Ў?n4RH!F#01"sgyCWns;7'x/ S!d"<(84Wx 1SDio fOŇaU@YE+ Ac ɻh#9rWgud(tr*~5_;귯ܹ3V\\`kr{7Y庼 ?rGlD.;UDr[Br9:X.]aP1 M/ᇕOH 9T%ׂyJKuJ.}eVT9jt^PdLy i}[5۪P+!5[وUs!C''7kΒZ|F ;PZZFbDf%dZA5Ä;N2 FYbW8959ӳ~-l(OOJTvz:<5~y9i&cQKc6 {+t凧+Y(QkUx$g>iO[G#P.N&$6DY+c㧊p Ф>nRJjK)/x^7@(ߩlzJ,&ow\ g]:[ JQʘӋl `IðGPi)Ikqaє/h4 W6c{6#p) Vml.W߇u7ATB{@ GMY;:YZ'gЅW0K?/ʶJm)3%vo >b=_Bѳ~{яf~V Kވ\=ԢShf#W;Mf;fz_h! KѼi`Ayb^}S\MSl*<'oQsIߓ)R|Mrٲ?LoAo{T4>>Db"p<-,PYUO퉓3BSARKpiV!/ ;9(LXG쉱` .o&.EN.A`U8zλMh.e DNeD4~ w,*\7h >wS 8,\v,Ή`>A>߃DF6O(_zuk{f1#lӅ^jws7qoyh߲lV,ZY<~^zGHp)/#Q2E7|]*'CI'1;z ?3FԊX%6KUImvkB=VenEq5T^QWl]hEo )KfU\G> c Y~C5K庹eNmv psgĕ G wGR2endstream endobj 231 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 7867 >> stream xyxWa X`:^Ww[.ʑދ,[.6`S@ lm\~de7c[sg9=y;{7J \;i ?~'@o!g v/؏ A CvE88i֬'O8q^wO Ǖ~ށu;#Gy"##'Op8~kývy{9. 
p\hiw`/ meAօ- _DtײHQ+=WxvYvǺ7l 4m3g˜ΣFg|&Nv谡 eؼaoR^i۞ &/ +شUV,ĂR=;AF0AN}0%Xjh q<^ABQw8nQDN%ge i"[ LM`]Bޗn9i,!7jVCy볗r k9Oa͠u4<S@c|Nn+W4" Ň.gnX.0 * ,zm7{q0^jgUx6ԑ)D@J*"<hJ o(9aLr>xChͫPZV`=#2M$4*-߮Rѥg塡M)ZNJHƸkӠJ&Q$]LmfRd$Й_ ("5]jP?o'Km[@V+騤ů|U~9<~ׇcad%"gUVU-,4kĖWʌD}O,zkk?fz>?~ά;ʃՇ[A8 rr5$A*uq ̛/}ܧu@Tцj,j.r RIj.h!áܘ~3ѾP^5B_@8o&3~Q >YCe> @۶HMMv^ agз,D$ I Y 2'Kc Ιs |F]3cn=TL򧥴D 1mĐ \ZFX<`ixB30W=B4u޵-S7NM V#khM+kGR'D9t61<ĈJDdcI(Udm}VCb,a`$8.Z~}5XbƑ-Ň~#}Vm Vo]崸wexk:FgK_L#%m81 *<OMSZ8*ܮ){ԣ`| $(g0VԳG+׸}%6^r;&g <)3c푐oby7JO9o,Qu6=Nm(~kdj`O8E Ujei i7%tDy6&㔬#=7S-?Cơ; <8Nu ~آuGB$5ǯ˻U#MٝUѷt)ePo[{g+-{M2>^OE؜ %BpخN&u^DAJqwT'/ :TR@:U&2+./@1όצ@TJ gPw~IЌ$žeFb8˵j KwĻλ觫I4q{~5\]Ujg2%F{H#c Ԭb=bK aIIXRm5F!-UZDc? )fPAͪpX! ̊!}Z>Xz kQk]qbK1AQ#j 7r(57J!Ƅ U'==lk)q*уtjV`ޑ [G v*epș&oo׾-ۀyi>T_V]#n8GtБ^%>ʏ+? \C.eL*Ɏ-գɖ{hV&i@gh }UZѥ3# bRREfrHt",{v^zJ\OK#2.w8 t<3Mvau+#>QԲڠ.6,"<ܜcX+f e+wdv%J*ZSˮ|+Û~A)96=]d4+MOfhݖ9վ R LhidMds})܏o0u`ꝛJ r4aY0b~hQ}XW"yh6٠$c7ZGIp=Bla=җáZ7jh$[ W?q?בD[m*tl֗B.hFԝߏ#wd'EJXTa5Dh O.GZ{~ h63ĿP /*5uEFIv~E~`Ȉ\$%1"ؓb!*JemH}PɎzF6s_~W5@+@YOCtZuWwGI^$+owXt_MD4-[1QS ;B,e{R%wk2!/Aɩj|_tlcnG_Z*;TUŅ$!WO=2,:g.8w}`3]29ymT:1[X<7)Cf3D8x9?NݝZSKYeG6rkN(/չ)2eqꏛv\}D@ߣnUx =I2ዿ-Y|jEs1}ygˍq]-OP(]_LU>88:2tө? 
k+Ã*kk++k!֞NJ&`l&`e1s=h-SQއC-p:=~]yvlnY]@DxQ>n_^ou\k#McۇEؾM.ﲅMյ_h:r͒Qn<ȥ\%;vuV^hqh9]DBn];ܬ Q\ǯYQ(mFF!oofyy.o^y∵[Xk!$3k@UZXޕ;×+SHtx|:]I3PeIb :ܴ5} cI<w׭7>_7S [!8a51{Bٵ%"il`ג_nK(֟d0[]8ws_AsKp> 8MG4 V%?}lgf333kZm5>?h7P>2{:?^-~!dd|^jo6bjU6ta8C  fgk7i"y> ăKIG!RbQ~4aTxat-*> %߉ 2 $x#[h&NWQBzӕ #!^Amz;4 E?=nױU1վIr9Vj`TP^$f똬4̹Sˁ*s'ly{+u^!SY1%?Gk4/7QIcjn3d}Nbed$Œ{  7ezdN< B!W,Zx `os}B%X&OWd>?厡nHΗ~h~V<5?=!ZB%Ǫ͉rST?D6lwcRPɃ!YPVQQ-&^âptߌrVԸj ͞])ZF p1Du=p1lT: +],[7nme*4\JFp+O`h=U[QDL$|p$e^y Տ&둍%'Mmͼ&; *\'>2{J ^wghW<-AmDiIq)KG+9W?~FF >'ӹ/1W3IN95v\; Q&3q# i>)Z* ܶCNQR!E$F!eA wPtih[ɓ8hrk@N2zdb۱hZd$XVaө-<򴾳:䘌K[)M O\2eg\4x܃˿ےhۥ]O+҂9/Ur8D4?4S2 s" תl=5Y[5ff6a޼ϟ:ٌ nGj@éUٲ5aļ&C"iVjL{Ӧzq=aw?endstream endobj 232 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 7276 >> stream xZ XWמLFō8 jg{պݺk uApEQ} } KXeM7EKԶƺZ[նjk{b/&߇{{y{ ,{P@ydG tz޲H}l6Zk<Z@$0(B%)gϜ`?u=;%^rg.p$~|/ՏZHP[(JDQ)r!^$D ^٣ҢE#˙VUtp0Xsg=Wroy=YgZ}m7/j3F!&8w`3ۋf:(omwe␷46hazr.\?k_v?x;o[9bg#7**w h ju>c~(EܧCm+CbTʽ˪PCy[ rW2]Zfə(1x.UП` E_@ Qb2Q *+LW0."6Q Ѣcn8}(#Kw j5M?pd'C4:7 8 _` X`͏߯UK0JUGQ *+iơZQ!djrw4!H?_$G$iMc :xOwN>;|#iX^8z]an-8F6TCF^A$r]O"aۦ]R#jȊŖrX<~=O? tvӝ'?|79Npߍ VtqaQ]U*~8vU&jKVxX}zf =Lq]FpP /[uwT¥OVa,W !(.Aw@8xoSgFfD5 U}a&izgd*S(e/]<{_}G'9<&ĀO ~Sy{[7/zZ^Zw_qLwyY uGbD̾PᇴObXB BF(λsޛ? 
o/&:aPӎbCJw_?{Mq؎ŽNH3[tJbΞ5.YK"TZ UI* +"V:/=L~_c"`U|߂ZFh am>!1DЮt@O3XNC.+ɒf>(D35M?}S)"3";D+^H,)ۦކ‰Jf{NzXƋ&~2dn[s)e{'B↼EK!y6$ԦoNw̪}C4 %䬅Vғ&ӱrYwnU JHelMlKvGA *U|(6s]JwT fa C<>i3=?i랷n;^^>HN6^L5oZ";t Ym6ןvWt:qoUc8#_MWs.6.>mJ7)SC{8sD=^rB[ 8,̮;A.B X`ܱ{mD7` 4BoR#rT P|ˉoK 8 11;%]%DŽoYKnp0T1J^*TKo@nxGx4l(~/ >B 6&1{Dgz&$?1 ӕUb$C:~ը }:7s+|y]BXvΊɕNR&&=(7(S-( 2` ԝ1TYB)[pPB_j"v'F=դ'k C?aRy4I]?BThUB +@:z"Aμ|o(SiQkώC`A~?mpm]=Kf_?ƵuΦ8}a󵺛l |`V0% Tа#mW5fuja kS% 5Z"@΃u<?G 'ևVDbZxa}B ZQ\fn]:N7hڴPKf,N),A++j0w\vcOŞFyt]/8Rc}t5';n!+BNhiuaSQw+OZR/aVPYUYRwy.;{y6UZSqQ CW8/v1_0o["Nmbr+ fbPb-+P(g7ʎYxhY^S.CVh'^y <[\ʕq|➈}I{5SC?dORpʘc2Rsc P a KOAҺs ut/O9_zQ]dc{Heћ&RѲHL8Cz BS'(zC?51quתDdRnҏ޼md~ ξ-tn}܋;]CN%ڽ5VPo]>\@S#z_' D^E \lD-+έ{lrb{fb>fis]e]hE``hh``Eh]]EE 6g>O<>O l]7R}MY;92y̷¶BYԔD]#sQ5ñ{r/cD甹z4v[O~Srɪ~D'r;X8oƇ !=^(0Ke7SnA(y{yb33HI@9L4LtFk7c5ZӂmW+a^xL:df)6RfoB=0~ j6k@&!qAGSim2zMozd<)5@4Bt[)wpuuq<'?65FJ~'={1u_RwiXfEcD^ݕ颍~Qeը/Ÿ/HڮWvͩ -tv[GiG Gvt'lQlzE q|EI'H!O>Z8TYa0s6?I!;bvr|#e! 8ANn $pt)yfDo%Jr@mMreB%OIoIq"e˓ιu#xw>논Q!G[m2P*3E Zx 34 `wulowCpӁF_=YK}6hVVqg?96 n[V1 B7厊ʬ\TԅJӃ\b;J=.5a_,ݸmu\b(/Ypbg5hJl i{Qzpzp -<& F ͪ ^ 3(XSS/Äύ8)ds}$JCvxdA3 pqB½$ vn{I K<-j!O|HzXԼס]KR>y#wvuUVgJgSPB2N\xraZI&uUjuei]aJ~;1b^|QsTمb6+Nj7]'Z%\b)[ B#N&F$e0 tIٍy-"I yxNGx2Gcy۝L*O{`v`!@ew$ 9Fk};x {o0B]2%5`fnفG͈QU$:"aA̴H^6@fyh(r b!U|F #DLa)BbEeBTvȹpu^EuK$R:_'Ʀp3\ӣadmqYJq!偣\ǜBSeɝIssk->&$& ?5N/ΑWێ_C_3`={͛=cw]\1$pO4G/N[i0$[oY兘HSIå|LvdoBҦzQ(R+Jt 1 hKTT}CGFz[zʸCv^ ,!۶:^beTT%erߩ\?=צF#~gwkV[9yD(z5" [-:[+JqHcbP:]F1dvxɁN@5]s\o.A))v0,~!W[ h#zYFZX[kPVGendstream endobj 233 0 obj << /Filter /FlateDecode /Length 6972 >> stream x]YqZ߰uW~1e)D$R"@aKC AlgѕU=3 ,ޞ:/Y\.ߛ|.t^.~?O5. 
w҉e^ዋ?"o GtT h?ﮖن`ovr6&X1=]`gcNi0m켛wba׻+;kEb;=ݩ;c,}RF3)C%ajb{umHe;Xm3ӓW*- j:N1g|cS33u|ezIYP,b\逴]T`/7uwu|v3]XX9 `Y5z@"(ĂG̼8?_$|R:+J:-P  LuD^ie?#b6pd߯%Ч/l2/<ŧj ][ya+abyy%2tr/m{!xߒ:tM\\57KeD^|[C5Hg%5VXfy'T~bDEoӛKAVMpّͬ/vas C}O_F"N}qdD/lDUR$ek+~:_V̕ITʕDC[VbXL>D.HQ BJM_hֆlZUˀשv2- mDϮ r (W?q\FE>.ꥁ^~P:L 6umMm-u"aZL$yyЎP ^HeXzvjDYEWa`XKQu$Rl!4j* ֚uQWW&*\E[uzoxV Oh ~enGLzI5x%kxSe>dDk£52iiIGD;GK(g#u6hxŨ:dQ-Tֽ܊Dh^~G|qKx5@@{1+# {5pb]$D.Ӡ>݇zj'K|eG Y%wW`w j ,nhX7ӌKXIJ,͌/['pE.mΝc 0C:5i&nywPQ] Ca[@yjۼ0i#80aJs>!Ȉ0H;|6m'-xmH>Ƨ"*`Ny3ER.InLuy!AUv(&3{YHa LS]FzPBQ2҈j'PA EEb9!S-¼J])^z]䍊T"oT,$5 m8[>ވC]_71hP\=~j ߆oنBIS$; |@+"BŪMLE\?N\kFaMh+i1Aޡ$PU J&@͂Ċ,͌bX8IW M?7b;*3V-ojꬽg){!}R:DUiܶ#Omu|\m Jv!JlA2 4(_vE.E.a[P,O $ !Jؐ G+kAV31;w F܉ǰ0HMm0ר|Ȗ.h]$|ǶvσjW|Bo.9E'u348ja]8Ce(d] &՚q=&s&60ŒY@Yoi$<@PEKʊcL(#ྃFosRAm`6EH J'˱.èeYg*`!ZkYh!;wkcl}SdiyjJx# /6񚟑tY%CALQ^XWdg9͜&6+C} >tk'+\ީy{i)67ꮈ)Ȉ߲F@rJ<VHIVgçq eF=OW{vaIzK.G0Ja&Ģ޳Xb|Aׁ;@M_n$5(1D .cxH*7Pj%,1TR/Sj Ҙ*.[@~=#0d6O|?Z"=Ùz],Or"7B?x^MEN"Gc:;2 Ԉpo6`]x {" S\ثb),f6e X'&ק/gSO* "H~:}m}ݰG[n8˸eWdI֊:>~0nEg ߘ.␭ '@F2˪P} t6vD6@Z.@8 +;{\k- B^H9ЂV֌@VpNA%ARGS'[36]m/B 9ܰ$19lVY~g0R x1,,^6ҷ(}H&QE (`(D `样OMJ+0m[;C8@Zg1&/ tt;]uա {[%b:f{?'" }\Bkdh F/g{/:˶rѺ+\DfpnuOF]a=J!ڦx@u &9鲱P$ɔ 83#ؗ\cImP05te$݃1:SRkDj"J:i_DUu_Ӥ _>Om:H\#:խwd|y{}]X<% J4l7]R+xdI>{(| X_P9!a1ֱYS:e;l,TԵ^·@4BU+/3ޜڼ BM(:-[}~[%*eqXJyD*U DjjccI'CV[:C( Ofz>.۳LS^Ė&,l._ 4QpApؠ+B􁢻l6n&'H5V!S@AE'w"scg"5&v· E ĭBt5Vl!W!~"'l}'SFĬ:Te&Z*׍$dQ/boܶWv?V{H}cY.lԩڇFvo =WwqPAcI4.JJoʖז>?~h,_r.vhiC, j;|pD G7 DX1=oW͞q{zlҋ:Lv@Y4A.[7s`59[x6c1fds>E=>1-Hֱ -J3"+Co&9)oqyFZ0E+|f 1f@Wb̏ʝ&p23(c\¸Vdq5[:7|%L  ;$B/Yg`vx`PlN7\ 6xIx$D7Zζc҃* c)p kX r1|*XyxD7+NJ:k Q$UUSLZ9#0IeETI ^fnneL#԰Vֹ@Ɛ,L<7L8qs X?^><嗚\%o\"OpDXFrb#oL16N~F0lŢ I~l.} Fz$ t'ƨHΧ*V F"I 1Jk^)j` ]sF-Pޜx# w Q!XyǡJ*޴G\'|[X5֗ -|:Z:3Ύf3B1H*Y+;.ìaADok.Az__j5GI.T-}皝SUXcQ,nlқI AEmuYY-LG)r\mluH.[\~vv:5:`Z.Tv͋O?䳿\y}'ߧ_/]4J8C*c}h@ky|mNmt!_C).WγZOX+8?r*wD}*ӏTS7 y^ذU5rnv5 OW]ZS]z Xw%4<=In;)B#)=")YLMkc _\|Jif_OOFJɾo¦~ȄY=|ǒ׾>i99Ehv_{Sh|G{X~g,.u6.6 UVv:Qk)0#+3_\]?b|endstream endobj 234 0 obj << 
/Filter /FlateDecode /Subtype /Type1C /Length 196 >> stream xcd`ab`dd v 5400q~H3a#ewo_ 0012:)槤)& 2000v00t;gu`Í ʅ|(YQϡ;çg /xk76i?~뙱wh7{r\-< Fendstream endobj 235 0 obj << /Filter /FlateDecode /Length 7233 >> stream x]YqZ?bC/ͮo"%@ ñX%{gѕU53X @风<;,=?r&|O2+ɏg"_4g߼S!x^VL>υ텘5nްgm/Ԥ r3f6ߪIYjzs2 ^Kg .C =S^3-U֩M}>*oټeZ-!]wiJHp|z'7ފny3i%['9;W+o'!t=?K5ɷH/ҋ$b<*A){SߥC7xi9ОԓWuƭdȓqBzH^OfV*xVLIr-7c&gQkƋx5U=g;ޝJ㈃dʉ -I$&a]x6OV'=\h_ 澬\ )2 f]ޥi& ^Uݰ/]Fd;5V ZB2])~H 5X҄w^qC`Me0rô }d %O ]*oxSVK=xxqiDt]epY;-J`%?5äB P6wQ_eΗFzC:=lW5(>5VM<]-<;z_lZ/LyQ%SB,fp?@N qn' [5.nbj^B@$Y#Sz  $ "^8,;VG'yg. s,Ȇ9cN\skNMb範,p0GeBrI ~_ÁC_'`+!i6>-?}63 ~h_СT[w&d@7,]ꗨ0ra!*rM$DzinG4uzT8Z윩IM /sm#m=堅͝ jbήcqBc+ l҅Z6f#$;J.-'ŀ9n'>w ʜLGӶGaܤM#Q+UP(%`>uZd^?>dfJ(pSr PF*2jNv֪ ΞIy'/0/Y 8@cY-f@`*n \T S]=wW1xH]4>d Z]4Iy-u w5ዘ0wQ7IGKc.i 3Luy>w^WP9K g2ӈU^.aHcOhiq'SCJjI?/gTDǼfcPJD˄ YSEŝw%DTwva͚on a#}eAQ.:ON9kdXc.~v"$erZz,+O)h(( Z5`n@ؒa\ ֗N^՛ q"} Dea`"ňs]Ev+-? wa#W> sh^z#@6u bzs/=in.ԝƚDީ:QS 6\@1Nvտ=o1M`h' q d6!@z%LKNoi)I?:#)x|dB/`]7}I*tkh=:ĈW!ZTy/Xۨ^_eՆ.[-ZnF(Gћxdfb9[/G/uz.V cŽL HΒ__f z!1őt̓lpnSus}B,_Ͼ8 QW6&&S9=uLʊ~?;^G.gt홲ɫSRȾhv+g[GEHr3 Ց.Ֆ }V7AT#ENʛĆJ"\at)_Etnm7*xŗzxGlrq~1pDi.-TynY7k&*]xf ZE+ (1",TxGcL1?b C|I+K,1pMg#,&_R7L]G87teD/QʸgY dQ}2 ֗8a|F~|Dp])x%0BM=zr؅DRPoOjW9 I0NQ*9%Dnn,}ǹh ji .إ#.`Ӫ|lƫWȥy_=r4:K"j"sTAZh7M1)Ysݚu<ų\tJx sC^-1֋IRuaEeZ;[aj>W4,1(`D<8/eK-®շ&ЍL)^nFln]Zoիb{:T13'g:)io Zgݴ@@AIGY8qC-+"jwMJ:5E5o>US$[P0,y9u1%ϴ<Ƿ8?p 4,vq|ѨPj[! MHj+rgm=F]qudU+fNtO7\n]=~^׺iat~wו}QId\?NQ7Oad3Uy@D@p-k啠 }o|*lJ%!8+g^ #5mPfd*Rs;k)D WU~_Wʝا,`gRM'AVlM+ŀ*Z0ԥ X}Z! 
kS?k9WUJsfce W zZ9 LWY@BoP8CRx Q6 y=q7Cj"5*-ADϢK2p:?]6\mv$l8ZGtʈ}U\4}ώY$7,4Vg`X:vZ!M `())l9a?RnۜNVJF0OLl<9veZ%q&ve"lhޫJS5͍&9 ,&IpKIcMr5'䯃}ޑ},ͱ1w$T_oK"I6͚LTf::sO}x2jll7M̈G.(KK:vֻzn,rEcxn/-Ϯiiw f>䈂⋑&mYgP%3\.-D-JlEG &.K9k4OiW<`+yՎK>D/*D]נdb(F DM׶q?G\A}~o͡"IVĹK2Ot^SCZrگUE-XՂ&1 `&l9nXJ횆)-j˜͗D4M[6%JΑ&o~C8,~|eJ9U:9[Amy9zjoekGAKLH"PZ =\xČTͩ:JJYujULDz&IC;[ [6w4> IrEEEvڪ-668&Q[]Xl}oG%zfA'hLgc'LpҌ1)Dti*R$)g/=jIjJOZh.UͶ5v'zilF[ ]=uYLyA[aE"BdMaAt\iqPbn#9BRH4f'wbi9[J>$0]aCba6~B_dhT^ʫD"8:}p3 D˻r|R94୦G(^xMl_D||\#Iàp3;% Ib'gi!٦KI*' W%}<ݖ]*4^4ͪfTS>AJjwgPlb+u:69񼱍>ӼT":Xӛ9<ExZnZ+k޻Qzd}DhY1qzg-,inu%A\1U|~+ GuS|hȸsb`'Hj kQNSeח/uendstream endobj 236 0 obj << /Filter /FlateDecode /Length 7766 >> stream x]Kqo(6tqcB I mY6a;Ba J~3ʬ ؋PD_z}'_}T/~c-k8ͫ۲y_P_.+owUx.޶˦x疸fb/Ю:բK70:hu,)8[`-ȯ]_Ӵ| #=v7pPEs2Q\epw(-]ob ַV]-<֋5(cBp4fQ޵JADI^ouo'\o {kjۖo 8r=R@C{BCe[$υ9#FfNNzJGr7 e&Hq.& Q Foq;b;e[V=_̭z ዣYu8y*>6A$^6vA! zP^G:vvv7`Ûڶ&#m(BuHb,ޜbDT l\eF-nkWI$ sOpXf\=*،&}yYЈ䰗ZoC~)PEsQV[oﱙ )zxq=z Ӳ,y"18E'v#2ƺݎEqһuozuGdM9XipPG?GϽ]}Q7L t֫L/Ao=@p,"7F:tᭈ$AS;DL tgN@\ll g@8tt8cjLL܋ě+'G @ >m7 늊Igv|Uꁝ``C"Pv"/B5_eFݲIX|+lčmEY>)lR"ьl 4k޸,8]P7:8uc!Z1bz DϏ4t+%:rpR/$xQV?YbFM;m$OJDTwh/pQH+7!=: ;%)OPZ:maY&?oi" g6j uHʛh'fƃ&LϨ\ICE=k we}hoIM8}Ls~uI.@#O2;S ΰ!QJd;_1ԄND^ܝtICZq&x8Bo94LʊDF;c26[r<^Mhz&+Tk{5mw-?J\\{.=;BlbYfU\4rBc0,N$\"gLRޫ1r7 "PWiL:&ۚ_6dZLw6MpXǼڦfմch3aATĥ4iqKGt@KQrBCx<ns"mq&Rz/<7.n\X,X!o++r+"Lqk73jЄNQOm!枳"MsXFջFLZ ,i̍V+)`g8*)k!,.O;WfMZ& zlSG;XLO’3*H6Җ F}Dw5fuI$BDh FQ{9ǵ uKeEF vIj-:B9"^1=s!0s4g, T}52do^@RIh} =g-8!g[vD ։$K{/v{l??n?~[Eɭ΢sR0/ c;M zİ,ͼӇޡdGJT gW{ 6|IFLM8.'BXJ }鞗Lk^wUEH`'QIIv ]N4tDх-ugWY 3K])DJjag CۣJY&'Ái(:9R})X63/zʥSRf%Eug+Hb*wN]~…^rJ@|jO:,SzLpZitBgFe$d(ch7q:(ƃ@2xfpOCe7.+؅ Z$ꁗdIsZa?Ljsj?ʹm*&HVNmS0 D df횵3GܡjU\2h2|hB ooloV;Vu ԐjirajB Ι&d)~/h\pȂ Ax1x)Q#XxӎM b;o5`J/įJ&1UYm|Zͭ#| Om_>C.O%lvn&}+XrHLR *>ڿq!νOO"H 4̍7ݱH%zI3RUn֊Dh2K=w)5  3WsQeXT5+I}3uɍQ*Ba:y`GI23 qZllzyƑ,<%d+qMҖLK["q)#*>q!MEBOG{o5ͧtί{NM8ۯs|lJ^08F=M4!CpCxB~aEL{cH) PFvq܉{Ro}݌0 R`abqM(梐-yGD5 p MSfӊSU㈊n~7Ā<]r-mk{&mg6}fq`eX16+?.^Ʈ%6{@x^Ѧ9evW`sg`CEӃCi/.6t'] 
=TWE!tP{yOꀭ\àh6/3gFyF8dH*ug85.u N%Eb/[ÅΔ ڱ6oj/M#\N68qJsMB]_9g7fTL LWQyv"-e Z &Z,޹$cAߞj*l ɴs:xlEprt@ڒ\3v9,$Jk;YF؉Qy^ܸKNzȞgPÌ5ۑ I8w_BîK,& I&лmlCyi,x Tlg^j~S&tWG|0USs#HPҶ~WL-Oq"n~%\6_sP"ÁaR F7JWh)ImJ/+}XςD$@a|{:ɹJn: 3QM,UwuFUW:jehqޓb SZ'ס>sO?OKi|ζ[2\ v.iG8pòOH/ J XP_odrsR7ʞw(I3hao~Ŀ{!;8Lq Nz-a:TÄ(6$q ͚'\wB8sMMcl WXeR.ޞ5JkԸ9-֪Γv{.tSPHGu> fj*tP\XB/OT,hAD/gNӋ%kŻ BL q{:vϚQ/QJ[$:&VQ%sð%ᶑbD4|Ԓ?`k+v6W=>0qH~Ϥ~+L&ڃ|d AR;@%̩%tW$?edY]!&PQ^Z"Ppr0wL:'GƷT1x[iS`(BBN%]1Iuܿn e)g2ƕvh`WN5Z.JD:o[~޷08^(z6Ml ןH?[_o, ߍBVfe) U~9v2bO(cbVCk=u#IJ ;fBVEMf.I?I&5)f'@Eu{:GTȾtE*@sÝbSv_~< R\2G?Atﱰ l7]uu#P`s^+(7vmw-54JBvb#iG/)t5=21٭ +[LH z@h!zrXJa}+zhNmƀQgr|O]_&{[j5Nz,t6~/ξy \ N^)ox5΢_r.eP%Si:\k0NH9ecF6vs˥3N?0RDxdlǢ>:z=}b0Me7;b S@6#DBT̾#8('Oפ݀DZ y kaw&y {Qzxm}Ө 2dUĵ^wԐtwЌk-V+?;1ZD.G  fO5cjyJP]cA&vF9Wo$m"u qzv<*y>_ض7뢭FTGϋ6p8\h(TVhՔXx3튡W?bӸ 9JfFܿ}ig<hk-z)9-<(e 6طχQ{hyo;kQ&:Uwia?y^MW(yi-Noz 4s9s֌ՂwT?`BwfnZsMqrTݺ ǂum}<_?riendstream endobj 237 0 obj << /Filter /FlateDecode /Length 5518 >> stream x][wgn~?Ѵq8bg$n402ukREIUQ0 \hm)hU~Sl^U?_IlpyQ@δ(aDi9$!Gnm%73T&+/-?PRI n{i€@6c<-P^oWuy|e4x6ct{WX01yyWѠU_gv4`/z QtYvݖ+":\Q:&xWg*?y l7 Yt)4{0C͠NJV~(gC7䩥 eI{l GxW%eEZ(pr ËvF%  Ў,oi8J="aKfF@P}N 8@Mח.qmg'ƌmK [dd8Ɔu^)zFL(MSGLjmQk5myqZ$NZSI#JAߤѹQ  . f##ߖv5;qָ.Q/-Twía^r[Au3  'uf!uвqI)ނhF~ Qq g/XR$[j"+02|a +e4)EZsO0lX) "(Noqootcl9 B$΍ yv|\=25|R+ڟ2A #ś}<ZР/ 'ϐs1D(.ڼӗW5DuHi?-.u;OS7o=t19dR6sQ6ꮲDZMq 6`:VSz.4)o/]y{օn=iSy 6d0% :(ƌu6,t щyk6wH~C}w"`-K6an@Y_ @L ̀jWwO]iOݤVQ \a(oߵMt)mX4;]sW޾&ɺ=s -,-|oʠ)2|[޶}+/le0iOL23¸-f?`䰂%[ 83FPM6)yX}Ѱpw:`cC# Х+`!f} D lۭ&Ubݤ1[%m@TAS|.i1eNԉ]a๸^G9_g0(ebʐ YnbWW?/pJViN`֮_5Ñ2I}o֮VX1qBBG`#Į2^$n+9c{Ƨaf tYN;1iq/! 
0]1IIl혌Lpi "Ϡ 2sZ^%H6(~HM: ku 9p[,$Uzc#0K|B5p*5($4E~#/ y!fUIIsrД+?B;v9k nQkUE+W UH@fBX"氛РXgaQ<= ýU˓Wʪȫ(G2_VYTwdd|ӗZӿɧ|񿿃?^?N|^R®2{XM854';L[|{sO76ʟy!& g@m8):w;z:]v-k5:gnѱ l.X3Haúmٗ t*9 jÂ2i'edBykYގr{9KT,vC:3/lHzMl9%iF i <)Y8zYk4M~=#`Sq@J]`5fha5FWh B[k/J`(0<:W@r?eTu= ۂtӌ|?:@@r ͐e2:PaR 5,Tnhڔ,MnH45(M"%[HIz{%n`䰪2mM:}xD~- t 䮛-ۦ6o_v.1c[jU?I6|ɷ f#.&PԑDOƪ\Sb8"hm7ngttL2y|A2FW !~uG~uQSK,LN,cy=aHmw?X61sϝ  \ &q*WjQ:7 &<_R"` +EjkUl4:z^SSҏɪ%&.tѮnq[oۍ~Kif]F*>|#U8BQ/c G0Qc 2SM[rX"2i)1kQLԤI ]j*~&jS])DtV.ioo Xvp\Wb_D&H׷J󾳭l ,z.;P?3IإC$=qx>zFF Ty1MxtW% prJELw꘡s"KŠPewrmc8&^pHu19|H\0ѝHgVJJ?l{ƫF6G O|HEzD]xNkzaom;[SNIje]7w߾[Xd螠r} }8m>vExa`tbuW_%2-_-$gX{bIUՀ>{X+-'b\#O#WGnOhhVHj{똙d^cl)/KJv$%mc9mrӀM$}?${4x }9.8$RcxĬ{0 C|@{4*ihcc2.O^%0CPSE-}H'l*'6fT6SkBXd7/PBj)gsKqIޫty.q( ٻaOLϖ&O´ow!S5xʜφ@NMyK{Rqr仧U <@,!6fC_84i@Az9PTl®pg$i%Jz$Q9zH&fjpF:ZYtAR[bʑK7tىH*\}iY "/YtEaK^D<3u׈??JhLH+2cHfUO2Oú"~HS/&R$@LB.(ojaET<. p˗0!@&8D@RUsQV[:RR}D#0z.'W{Y:{hcFe| D햧b*̊nJ/FTUh,үc8.]&G)1|_]NV?D3n@4:+iLJҴi5}ݑˣp:.5;t zޥ"3S(jL}SLKfO8uDB fϫxW-?G_AGrȏk9OzӞ|xKnHQw ?v~RuD:ro B5V'bzV)Viix9Z; N|WLّcF"D-a4fI_FDg+sQ/T< m1['-6#ʀ0p?W,e{]VAXf/K8j̏(b osd`&_3G`bkI~ nЫ(oF7~M8rro1IF]gM,'i5I55=\,k,B[ED:Q>ie@N^i= aOMWxυ$$,ޮNvDK7]Q 42W%Q3 ]Y*6dH};]gT a҆&@`Un8 [1gZ_YE:$ze}`^<%a^ a#gC{!3 HO.^$a~~9]8qiwIfßަ{yz7H7ܚ&> stream x[[o\7`,c/E_nѦ覗5XloO3_/)IFLhG"EI~d#_2gy|.navɗo|6|vil4Fbiabjx̆Bk/e=r8)iG93Zi8Yu##=6r(ȑb/[ Y\V+pĤGqȍ+1jμ9+ĔĬDppvF#}|F===o~˝֜|@HIR؋G*gҏ.F~pl9ZܓB1=Ecn8zsIF=+G=MCޢl sjV{zt΅Ja=;h/8i |Mܩx\*5pf9e&ov+f=/A: /quyxrH81 JA? 9 5\`& x}p8U":/t3HNEH?z5{tIReU9܅VQϽS 0(2jjR) \%fߵ-Vu#33RYpHe,Eo"-X%S$(eوWw nFQ>}O+5MO8pG&G`_F5H\G ,\lx>'7mL9+PN]1܆78|ט&$Կ?2|&EyOT$ 3'm4No8Cx 47wE_DApVdU$9/,XyoӢ gֲwmA(g a Wq' dᅫDK ޕUS"L}`O ma,CJPl8.X t- ~suQe 6]Tv31|/e!0YW8 u%!qMqVjn4t hʼn2$L+l@DNӲ d1=%  &``0e,yN0'|;P*Hy2R+"2O nmR! E d ~8\3vcvkm ZKkx!1x0p<`HlNP+Q8Ex&` "Mޤ^7onZ g#xhfS&8VZH_s2؍ Gmpk48A[L8ø`_uW )tV1TF0?3uD\G6?rSi Pg y GVaI9rgʰݳlx`p?g:>."H TO!kcLA`7{fSU/G;aq΀j(6JQѢNL:*b6& (^.$z|3jd&V Y[ ~ 1Sru#ŸT(DqAudדdpg]&yIh CZS! 
wZy"菕"|'&jȅ8Y@幨2Yz]%/MiEߵ0;3x ˜ISl ˁx#o&tAܴµ? /UK&2} ON!)OlJ U3P?3L\ "ÜsjdA 1YtA8$Z"a㘀14HJ} UauaQ P 7[?oʪX.ك`i Veg;]k%x__=_" rsdfRQ #f I / fYɳD~ %IK8` gMu {BJH!hGEe!~]pUf/ev-̾{C+!\, ()/`AuEדr"8/yuTaB-~<쮟l6T !Zg%Wjqlv+KM@l̶M zʓ]NG˟A-w=;MriszдLMâfi+2'= _|-A1YA12 2iďKQT?-ROyCMI5m& :kcKD&SEUK4-j6kpJa{͙tDFHqHl^* (]e54`i.jojIRJQK|Q,TFC9I 5LH&L{gë",6`[1 [Yu";V=-Ys>l.bRM4x#Zx.bHVVFUMWq,z1;l,H=5sh|D+S< U[ .iZyT5ҭ*K tu^\RjT>-E_jRSeh]Y&'R>"86ylCʺNƖͤy|{M'  BIZ"~20/b:HjV3ĕ kҪ\/wx Gm v22ﴡM\ΔG{c.'Л|"kI@KnGlp>zmCIb%IVi FP4P=t>Zm3v^r]J7cUީ ?a<_x{~;4 E{ [hRи1 &RRc9kw8ҋ>a("%Q!Yuge7Ҽ4m,҅qF.{Sȷ}mS~MBU,w^`]1>+&5N=uħB0|^ک2ȑ*ڟOUݪ6[U$VlV70~Q7l$Jɤuwį׿hIhvyzЛ/.e=wtvkJ> stream xn1Do(/8pر`>b$?ФDm$e,TY=å]=]u_?- =9?[8{u.ǃx,>>e/0jqV,,gOWl /Vl{V|0FiX nLgfy:ԃsiw )p36hgd|&,Iz%mS˛Y#Rg͛`JAHӀلo$\ftDvpH9PNY)Pա@ǯe~Uyvjx ` yW\$sX&\y5F1 ]:km|lT+98";v*3luQ9sZ 9pxRuY8|nl Y G_ַM IOqݐ-`vyt%X6ǹC ѯ0 /"0CefqՠEU@5UȂ*HGp†(h A84Ws=D2rRŊ ^@PUh$c1.|U%tԆ D qd*pF`VHJ"gHϕ6Ѡ,Jt%\UU}rj]IᗢG ȫZ\>]]p> 釉 &T1rZ\A.E/0b0V&.I1-[}1>0'H4*lj)eA*a['!/QΣmQh6/Dx `sVh:(Nݻ 98Oi\ӥalxrEm@GDA L*Cy /QJRuf97oZl6s8P7H jk26UZ!5CU5_yU3,ooA'ZDߒemMlRA#6f -]6Sn_N`nrtuWeh3#Z8P@8d21MMa,B$K x$>$ !AL-TQY-^pS& >1;vEO;( -_-H&ؔ%:j?H˪6o'HЉt4~2Hk /'8qDHimM/# CBC,ŝpA\$ UVK3/4 ([eWAc}2Q{栵cUp|V2nbbfTAP lGے^*XĘ–[N.ϽsDKцA%cA%Z.kx/mݐV6=+Iި(q!5&mϭR [b&"'[mY:IDc2_K #E0̧2(XrSi f޻ACAw-dxIn 'pw+L'pI8]**aA4%G!"m0ؿI~c1 ݪ[!į⣗Sa _@#ʱ!M[s=$*:@(\#;J% fn6\5 npFKqIjlL) jkk.RU.l.Cu[L j5z墉#+ pGLƅ038Ivnu{JycϑntޑL~Q 誂ߪ-N{t~!!;ʸfiy~zt>`+~Z(b8<|_.n|݂|y>}@(Z+Atil4ӝB̴'4$;H_~Vgz6g-Y{}{YԷ@mŚ`.Ho|*y\iay{\OkM ^[ Z(\"B_ Di@$޼=4v#Z =@ `<A cDҠtYsQ]1f}@]ѦѸ~]>aCMdo t`;RF'Hf; {lnv}E4 yvYtY'M̍y QBԋD0zCr;*d?' 
x|ݸ!x<]U[:NXAln'@^]|b$}P$Tjd!j.i6؄#9fP0“Mwt9.xbʻ)*΃TQo:4L;hUpJS៶x )\(/iB JhnӈZ &e &Re8M<V_$闉LCG+AߔcgTM8=(lkK2:cmrI9xkҝ\XZ)vcZi`MZ7q<.iƙjb2hܘflqRwnv G~L֝Oz5.^E~e}l$ uOk SI=wE+yJuXn,7oAZОIͶtݝҬ#w2̈́`f8jl vrd"Ɏ(f?ncLӟ{p0eIob EH;8gnaUzvȖcQXz8?<Ԓ!prOq-aI# Nh.;~5XvU+Մwi5sn2Lb DcX2T{IO}UL M6j[Sf*S+7:JdcUnZL`~O 6uF1Fг_ڑIa=y9Xd+\u&TXMC8HQ~DP,*wY3elqj yQ=.VhJ֛hD^;Z<3Xvz|)\>Β HO{C_{5gXjǹi^$<|Z+}e3.\ĄYULHp5e1,&I${ES]y'g~6d k\kIq4ӊQ|ț&t /g_]4oz=KX3U~IrJz.;pv 2UӸ="Et$y'e㔡t.z Tc -8f9N7< /'iL>V2@"/\[47q*Q%-KD8ރKzPϘFve:t$]TkӹA7P {!շke ߦzG3%d([Xͨi 8%|^[˂HSғDX>+Lt{#0CpA;0MW5B -ؘU0 VEnf+uYƫXƜ~VUƷX X_Tɸ޷&-#=ʋeT/"եʨܼ@^8&j٣F *^*ԒbrS8Any{cQ}WΫ E:ZAxƒͦ?M(n$xKE6bhqO:L rZԷfz3}Q6/4!n!ᄀzFdT3TC7H\_AxQO%Q$-)}R##!!04yp}>Y{S߾JTo wbYA O+wD1k{){r +smR5lGά;58|w)[%l$%3CfX"k3WC=x;􃚡ŗy!Y,V~p.k o=nk ؄kOKf'ߝ(]6ϺIҽ<.3 bvSfi~`'t `fgyZ߃\1w 'vmwJ "NmߏSJuRYH?*gݷVcmƤQ-wz=ܤt&]6M{8B~KƍH*:FJ ,p׳SH@~[鞣b.CDBfjUEK=2jI4X!_' pؘ, W3p)M&[ 'Dp[_4$#k['RGL P6|M&%I(۠x)" ) arU6TF'IXߴQ #X1YoC3_Vd}3?]Q:O 09)Vendstream endobj 240 0 obj << /Filter /FlateDecode /Length 3899 >> stream x\[oip=A7 'Q8$K[I.I"3rj(Ь).9o.<3Ӻe|}cǗ07VGY?߃i=F{|+֖f2۶Bd2/w9l93Figk Τiwk|*]umx^~q_-Lhjfy/0qN۶Li?6[!7lqh ̅x2=Zq뗰& QX!aŽǴp@k$2LK/hDֶvʑh$Y%eAbab{dj>M eVjNu$B!7A pN;¿V,)#2xO[%gjH]aĈRU6rnx獶LH)Fào&ux$!h -(} vi>2\s3"E6Lziz]ĀXb Lzh]FR5rgwhp}9,E`TW8ՒSj#r10xjK 4ɶLLx'eSۖ(A}^* Eáw%h^I`K/a3Um?agt"ȹ½ Bk$l>_dl^ٷQSD zHK(9*2OPZL_Bf2GÌ8@VpJIzWͨ,}8qdg3!D/f3@6OqQp_+;̳ öbD[- ö.O8l}nwUr8wBY]`4n-kOqjV#+2ӷ#eG-DX25+╶i<^ɖUǘ&a‘( &.rT3C^k5qciN 46uUQeD&AU(!DP11 ~ңx/yz<1b$k3Oltn(PIbrYy>wj) mA279Hwxt*kԿVcXkAmw@#JK&<.<;Hl"څP#²_fمD͛л6eRgQhw3Гhg _U [x]BaF0Jfץ=_>|xo 5uߋOkeU͂,HXfy(Ë/V^|>Z=~W>{JyiL\"^:r( Tih+^{o w2.\mk[=pwNKi*3=M=$ `ƯǑH'I4='%\.oNE`q ,Nչiha6E~u gpٖg3APQbLJS&|?X]St%TգR^˃׶ֽϖ` >I%]}0C 59uOsʏ4x1̫MڼʥO'Iqj.0Ḥ >VCZ|b jHZ6X{ FPI%]xY63kn7yi?|^Fft ֢,l9ը5eJ.?&#^a\1w~[ՙ.1$AkiK"*5i0JєR;ؒ d'I.wd+S%2,QrX68.- b-PxU8QP2^KK7Ig:&u,C.^w}T(6,*MtMd\nMfPrUӡD:pm;Y 'Ңdj35>[,[ȻS136&ĤzIM?쐠,=U p1g5:zJb2! 
E„Nt{Nꋯyǜ <]jr\h7‘W{ȫ3dɓ0zoMѫi4 ~$ O;Icr]]ThF"NsIG~5^-M@[&-EN/K)2gX}Qnx:2z5K]KG&ZAUv<6QiIie0{2ؿdD&wMĨޣN`ps;2)~SfvcI {5M#u^I%?(fbV#dj$ /rTFΉQB*=Lb> 3W_m˪V N!y_NV$2Ė1yhq5 RXWe@i+*2ݬ%ĹiYxT& /t0NZpZY$g:H۶Jj#I*XE9R4ۿ?"QLI=7 1 õRv]) ]=جL%yu+KK+-`dG Q O6[ח~q":;/陫 QX(#E+n9k+NF5ۋYU@M}1U~-myK\؍mz!|lMhD$V8 e4aj0]6i&&=K>tny *rOg䌴%d$ҁIVrk 2yJ790Z=‡l"14qj벆H -$A(YKu!#vuFŢ4YV-4 l,ʹL0> stream xko+iQh|F SA&"pCYEr;C.!;#gÙ<}7;6 3ys{'lrn 1{0]4rwņ O :O^NRŶTLO`'ӽfm9+1ޑLUnz:!ҿ}4rvm{.v9׀qHRДVI$Z wHFvy$2-@pYǸbsqbPӯ (VLuRe)<>-#T :gp) ZN'\t^ \u},-Vǘ.Y=Lz ?mǠwaj 7X#t~GFNj(–ZXR89$3z6HXp@m3 l `+QDf kD^*N1/7]dA8؝&4ӽ09- G # ,R+AejCarP6"w4 Nzi`Ġ;8y@_gYJx" -Ԕ(E]fPdA5Bĉ )J%K?̽sB7l`Ti x v y)vo-Qświd0^PDRZ[|?}OO<6ַ'EH hĨ|-X|N:7/S%@p `YPŀJFص gOA(#HQNbI.+3"}4Lv9:`laB`޷ W*'w7n5\Aiy0ks?7/$YҤz}$Mڥ@9OdQn$&td{=0տKNiA']Ri%Nt;?۬ K8x cƙpV# x0pkq_du!Jp{5^"3+0&1Q

`( GAaT(.#DE$E9-|4p,\g@ 'uV~dG],R-( S; qTo#3ϭQ@dyguG28_8oz1PF )XVh͑d U)1fe=5,RgAN;+[֥%X]="/@,@ʣo GrD@:Ki\rE!,erח[K`h[;تPShuǧrƊb⌜kugC.D6BjqV*LP5`[[LP$\Cg$; RM7ڪ e5ZUTٲ$ ChEaɴF{VXXMb Ɍt7=0`c8^ӳzVu~$6[Zz kjj#9[h nMܽarТGԶ<7YCLb9 )&Qp9ni\:PV]jL}P Xg P fBB1pbv F3owKjR!21--+ors^_I)c"RHgZ ebZK*l9k&/<IvTΛcIV͗ތӆ֑nS!NdD,]IE9,6NQvPtvt_4?M&>?;8)CE,Md (reR%,ʷZS޳5+$#29oh~5~߼y&U}3' TSFe H8$ RmwTKϽGPcdШURD&} Q #U˔ҧNB(kA",y&,M&ي'Ҥ܏l 盾d@ M_Fq8P:la\Uo_}{o) XSYM[dqPRCA<։L05U{o[歟Q;Ih jt4א&-*iiqU3W]n~L:HO;iu&χ\7'.d´HcO!$H` Z苄딉GiL&}s }3\`: ԚUsīB6BVb;kb}1ZBN*1 F8rCf9v{wcFW |t֞#{tpO*:uxQ 1+uWj]J%F cajI Seq>U˻l] _wƱ4vbڍű5UOu9͛ܜ.YyDyz~QZveNMS[TMs$rﬦ _ג]:pKe9kEvG#S 9D5Ug]P-H}9pΕc5)?W~||>?p p<֓-Ek^iZUl~b`uwg:kB%H;tc;9!f='roUkr6|Z&'h,$'8}8+=U:ˉ !6e xf Etm[rzy5B6J\H{\Cث }0gCEUzIe}z _ .~(07Sx y͔_E7 vx>oSM9>zuTYt>ШP 9yl΄V>/@b'R%I|$0?'ƻ>}UM'΂3DI>XZ=&O›1/,.aoc XtzAA`U$ħ. ߙ-8~5~GRbx)ht6.|RC/Mk<^n0>bǤWrŘtOnSsa%䋌镬3M*REdI~wժ $NSNꄅwgj#"﷑"y&DQ= 01I !d HpR~Ϡo1V}-9[͚5Z3eW ӲPEz(ڇQv(g"Kye~V#}'!m6^TUɚlelk_[iwܜj>A0ݦN> stream xkoCZwEn|qvAҼMR$˲Oׯ wrNs+70`ѣ%g8΋CmĬ?;߸N=؅;4JgOwDəmde(mS_-B[9?Fba48=MEu&ϮɀtijMPQ0hJ y?`VJ ?|g?wk׶R{ m b)n\|M} ƐiEK+ dM#ÇX]qj!c[dSx _"6 4W )T8Q@n 0y^ [Vz7k& g<$=񁷢֢8&ҵwj~ xiq4a~~}Er3~'MU(Ix.P~dp9v`CƵ  Zx̗KQ IAxi+aQI [A>tM'p d t'j`g2M8y^ yS;2"{ 3}/,g#\Hҷ= |FJC1;Bcw¤Sn۬3Y(|}, ?/v^δӝ_Mcgwf ŃG;w}1>ܹLzC_v>~4C494o ]ĴθHƸX{7T=<ܼML4)\-Bq0* XH#PD>%23 w2{ꚮ 4#(74o.i磜N@/;N3&gI=)v:0_F ~Ӽ^Yg=KJ"S#J鏝*ԯ֚M5ٸaXn((Aˍ[j`kQ`LiF+ B>ؗTK:(m EQ.#n{g [P!C|zۃ 5i{t?P Ē|)sICJ)dŶdS'KC}IW4N{,NPl_ZȾ\Yn)c/r]C6?.±J%ygqNblMgtA1Oo78adSv22 2Cő֢ Ss"E9p1u`.L\eD6ufo{5wȔI~,ڭBLseUl-qj#GOy\'PcV6wFO̗Y^5U J$Y(zO(/8R"r is"UG+0c<,6KD:U1b0ȕl_n!@,C {(IqAb@G+6-h)14c&_UkNCv]ľ8[zRxL Icؐx-7^u\ocQɸ]EeJHB:F,}K79۰ 'i]FQ[QQ[[(-:g»5쳡eg$u'5 peet>fT{͢Ī2kko$~P3S&Y-r?UE92DZϓ 9uϖl1lSRHC[K["HMx4;Kh 6<|`!(@|GQ>P"p_d(Q$kCip*F=Is: wtS2oW#}*1NDU&!$_ާ #sg5{1f ě[ ~(.rޞ&6o ]A{Gh^8^m'dD7K,;Gm$8L2LQƞc=$:uTZu ;,nKߖ gp[}WlHpq?l.JW{H' ܣf˔[8 Xn,;RK7"uz/p m#xСXGKoG^bߒoG^bԒ`=RJ-J.0K70bi&nTg- :q3ߞ㪅uFrܫ5LNiVt0')*Qƞeޏ7j/Ssk7JJ0tf3Wl mNmiɫ)7Ur2r e 
yIOp[6k䈤WF%U_(3_/8I9utvc{[n(iPk|SIJe(dA(gTsK|K/ 2GSYڢVLbS0cʾlOkS.ڠT[q"D!<{ӉPtdvdb|Q &AGu4:oA*&g#qF\ ku ʿ2q yؗ͘띠)>,kjTY%+I\_5?:ŁR ,t+eOzu_nɾ=w)ۆN 9rn<.D?I96XA>Rs_5b9. <ϞoMaou耣z+ԚׯY :R3c?耂fg7 ny? Kk ̩+PqP,Ҵ[mDߎ`O"<=b0ww(B &9<":8EWRUQ2_ LiW'(L89X[$Pn%X} z[̟#Ϯ^J䔛m*.0󁶨'o{Ɖ`_pi;)Uc^-8J twisYCD'4z?*e8q4e;c_CqaDulj-9*7n[%A;X/֣i|#oxuz'۩ n'3b-luU>5=Ho.i`b)7[=UQYNN*|[<)9U[z#/1* o%7IU̵&NRbL[>!r,h]?naSϕ%ryؔb|41ڢ8l0MߠfXWHpoC?mS|t`H1mwkN`>T>jhuS ] I8Ӱ_endstream endobj 243 0 obj << /Filter /FlateDecode /Length 4560 >> stream x]s7+4}f5?2׹4Iwg'Rqmr r%,_9I  y;a0_𩝜; x7>{GM WTFL^YlιPgBb&*͙[;L/_@Gcp=Ue [BulS˗8SĮ'JKgsSqgXbz -WӺ*YZ#uRW1h^$sYkq>*ʹlWpY]i3KV ]#DjrVys&5FiL%TU*߅ZM4Pب]&"AE -Q L+=́Q{9= y1q- !?&pö7iVVi/"+$ ;(Hjl]5;WbU͗lOR۶o?W7AB sa7:IԼ a x?0}5nE̅uԴ+ 5(8~Vf?7M] ܖ\pYȪN%\2Qm"h<x-..i D;}hQ1D|&-O k&кkհ D ES/0NsWJx-Wiј ¾.IY&tZ(f@={4HJ>L2pOxC1ZP 3<DV394SϋxPJQ\ )]yvu$VTNuڙIh=(9Bwgtк+]|1s|!Oh =-i] I.>/p:W>ɑf]%T ,ӺמynP evz(dF.VnϠge\zs~;::JsF|Rx&N&pL(I3 22ޣbuUdVQȆExґD!nj3P2~LB52;I43y趼 wg}FGYO{3 #}yDhm |/ H] T5hvРZ*okѠipG2h=B_&hO_ O']*Hn ʩ8Fyq6ƑnGg5E3H/WQp0v5=uФ?dd!0aQYUˢ$&g8}OREWb~NaDqшhDTF ~}#J^I|BZ>h9aGvQh$f=xL* .MUMJw.ITIxU^s݊$ 4YMD%By%%g{Ɗ֋W"M:Gnju"RӽSTLy[^QRV~|xrKК N"+ Fp' /ҭ+IJW|4T3'wG[c`h::,MhByjg^͡~ԼJM:,4 gh%-9~΋[Upv4@c8QB9n=,R܊:3AW]vPVAL]E)w*axEe5@Zcyk}~<6ĈIMуb_Y쫊}u)e]]} a58?mMx.Z&!t\2m3πs\Zoߤ0]+ȸڽ hp8FPG&y7׸Uٵ>v{5bA\/JA66n)ioo_F%vVU}Ҥ+嗿eQSϓIWP[Cw܎dW` ;ُxHc&zN*E@ٝZިQ/$yȴ(!hYldIȜP.Mps2"aeY9s~ ].6&}nE K̤O=~8՛'鼟A%qSUyIy+^}@2nIxxP wDc0nExX EHX6s ?6F ݽZdo1V8H2 6Hw(JibK'ElUdq\dlaj7_Jt t-߁7ZyyjPksB:-tb,J:Kn:ijk'Q9dͦk7q|vk r:-\`o0Bn3Mm)H(gCE7o;N*H.F4l[CF">PqUl^,{C-Wr#pbܘ-l?d}{ۋr"6r]$h}.9ϋs{Mw;G-(h%}Ī]g+:*v'=w+7O\l!=e)drA6bK&n&]mf?A"`Qѹ}.ƫ[QmcɋZiBɋS%?m(yRtr ]*$D@pVI{~]om^M]&o3Mm)g%5MyP~FHRe|YqW#+7Yv_ `RaFxl?}!T? 
HzXr̓]Qּ[,U/"9MVC>Κ2 싁dz{u* W$cĉa኷ /"+aʤZ[c-fɚ$fWZҴdbk5r*CttKwXA+HLqVk ei[FS+@ FQ¸C}χs1`_R>;yw5X)XGk@a%Ի kG5@{/ 7CIlpRYolB\w)eus;DEh4($!V4E;t\w/|Z'7ڵ 4\ۧ} ɞɃX]O#:!t\EAy]CG6wt˭)gD|?0cQIT笊ڥ^H}YxA e#W7sN[foX6uٗno8endstream endobj 244 0 obj << /Filter /FlateDecode /Length 5915 >> stream x\YsGr~gG qpZucM޵~bbyYUw r:2s~}G='<*'59)uD`NNzќDNN_>qdklgiӖ{۝Vz34qysS߿`״O d`lN'<:7_mwa9%dsJ[cۜmw~HYyǫ K.QAc[;u͟Hx!xu5Zzl^ R)Ҩi7=Mv1m.} U D6d~>-oe|p{)a5'͘PhOyg\i-V6ϥFbGkq'bfLv';&)]Oz\5]ȻhӅ@C|l0xeal3uä5g:x x5DSL\(0z"zy}pE|{g rв / ͷODW:ſpl H+M#x&ꑁ]öJki"~|!Y1̴͟q!ɯ'Ba%D`B$֛+DwWCĥ0k.i;|J#D3Ѝ<箴Z4XƐ.Ttp֚6lS@9г2Ӟ&KA  6`,:d;(j4D\WixY9w[xwJIs÷ s.N h Rz&[ePтhDNQ$`}ZD!"㧌\?HG ^$j?+Yw$4Ƞ3tQ2GSU0 ! 6OcK кgY} k6AY4lYVw/-{'P5+QÐHZO;?~Ϗ)i"/`="X4; Yl15ef*}*$|[η^ VX- #@~67;`ۍ=RLLȬ}`XѾtpD\; }z-F緣",iNM; s??붃Y$e6XgXzD+HAe7۔Vl/`寮ܡ'n).(. h&zd;M q$&qg?oQй_5z4%׀ȸFKTz ңiNņ=i-4rCC㑜܃z*;Q1-XK #~,^HX_)Q#˓D.`E6lrT\]$cl_UTllOt|3ʹY ^)#b#EXȸ[ts<C.RBS;He?xYYtR]Ni̥)Bp߰K4Wp_!Y%,J䃝tUҪկlE-J7g8~']rcÛC@bkyϠ?bN4Z>oǖl*o?b\1H4F c08,;$&_%]eTTx >׀f0AeE{bhV1ĵ LNHM#>m[[r, CS<ې@\Z],ng (k"sf%$-t7rjVP@]N츲x/]=ZEqClb{?K]_DNMrɈDcώk=NE8k<Ân`#@xxiq|݄3``A*d\G9 X)BaD6H:z@Fa܈[b. 
>Tk\SN9959Q%[Q+h\8~vTwrǫUBG 67E}{#8$:YLu]GBON XusE&&pc%$~ gc؃QFS'n q_wI_/jY]A,iؒCB)ڦ8Iקz} sAh5n4q{y[|6bI c\!OKyVXs yD+y" BF~ y *~qZ{MN5&5|)C~$-J@wM1dh${q,i͌l"u}Ui2tY4z+ݟbpS^4j0aFӶ\dk,fǽ|IBf)Djza[?k#O5Ą4,o i3դ&08'twD0 bDKw#jX *ꞻ#osx4%k2SQ) 2I|MO^fWIOpul>lRw֥u i֨(;|iN̐:_A`2u)1K}GG@:.nUy9.I7@B V.qzl ~LfG(C5] C8MbDӟj"9XSf܃&閄xTH\]K\M +a)O??aKyJ-[Ʈ$F`{[ZA>4_CX)[.ӔV/,٫^Z$}%4z'5pM71)7b"D֙bL"knLzKW"Ѡ1#_N-{`TiӸcck*/s `n Whbm`I!'kp{)}c?MW?M0WuK>.p~"^ɇ&B*:#kו mj7Z(1ṠE6⸩ wX ^O253pa/̃:\K Z{*[mfz` #᫛N{" ^\HxQy Ue!Y%~Zʜ?F{)ɦہ1Kq[F/'jZc &aI햎uys"8эofeq쮨(-_&Q<YaHqezVNr#>&u`yvaGT89AiZɓŬANYGkTL,A1gMر"!VMaZ:R |uɹ*jDWS\{q0s,[ mlصWȒ_P:]!5&ޕq+LIU.7T H &)±.Ҥ].nL `i/w׫iG2tz-M?3*Qs I.nWݮ&Ɋ {%Fu4A a|(Ehޭ>c #ECJ*|.~rN-Hu)jͤ&S4\0eՙY`@ P(c j5Ėf' u d I_N%XVq\'6$TMkbH;R>K&:$gŞ%yΘ u@9Ɋ櫮RLfeC[5*|(|D7PXz'T9Ji[.5n ċqϿq d-~jAp BjJ߾5 i`&`$k7hqPL׷V#ЀT =&uzsdlqQ\&W'U)uYOUABTS^11"1 o#ӒLVwo8]lN,׼Qg!K:yqDRP+IK/xC Ky厄& eNMjD4*zW>u}LS=e? i]1ֲ`)MeYr :?J -ȥbcaͮmA"-,Q51{avR%7kȴ)Bx Pw7usYɏPK~ hVEY=\nSο7_NU܈6z>q:[RY.{99B%=LZ'p^FmgK}LTnn{m>1>,p߯b$n¯{0D% V/%&oJ=fٜŬ%{V7ITxnEM.KxmJx!% Q6KJb ȶGѿҿo}Cendstream endobj 245 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 4929 >> stream xX XײqdUDel53(*$W7LwPe /"3353 ; "&޸D%qI^f& m_PuN_u$L>D"aph8^!Gߐؽ,`Hkt 4J$avGFOd?c췦ttm0?";~wTwc0 xFE3}zll4ia&M _ge;߾wuzoBvFGGدeqaKئE;]#"DEz,][aGg̜0Uf53ˬa3 zf"ȸ3 Y804ƕθ1f.3Y81rMfƌ`2!5#g2<3$2%%,%u8e2"WoPn|mʲcȁVN 98l!CY[Y9[C݇VCaڪ/K$݇bh\ ㉮oŨdSZ8ḺrjXg(锝8ex-B<d<yZ '궦~_tQF//'WTe}#sIsDkz YV_3_A*IÆl\E% ,Tj.)/QmF,q9MX`#Vc).e_xsޖ(K9e})qVy$@'9_<| 2 _Ld9FYPuA2P2Va%hFI\*O`(ç o8#vd 6R<'r@A.Wɰ|a-XXahxFW A9DUdAY\GQ!T&=MXvDO$MQ![KHJ2 'W!+ {>k~34 "74U4d˷Gj# wpP+)ij>Wk2}n۪L.m < NWD;'d삍;⣔,q5_٣gȢj଄j4: <̫dD6Lņ.w^ ֫d S&+cN3T'9A3,~09wJ .1֘T"+-X2iеۡќ'\ެB 6~r>u55uMg7Ը+m)j9g@T Xj|ﯗC@ duSitSM7eu֍-޷peC+3ܥ pWC(trANS, )‹B-Yl l̸?2f|z=*NEfI&.2DA\=+WbA|ʀi[c(wj-~, Mx7wGkw^)S T;њZ`VpxV.@E'g38lՕt [vxGSO2 iht2-8LB<w^uMI^R/Q<9?Iۅ;*vIpd^d!:MN*.-l$E|1DUGh]t2;F+NʊPe$)W*9G? 
Y֕+UG銀{2n䦒qçOAhhp?2=)÷$28@KZ/IxYC-pXq[ͤ٤Y iҳ2@$d-Yv&ifnH2!!.$]TEM,:)a~*YAjq/0C77RO8#e?eS8o!HhNzi&q'd-%] ~RQ-mfL)Lin d&YXdDaҧ$ n4De/"ۋC/#)x}fhJJc@QQ[/Y vAK3gW@w,PPXhsūQ1IH b8ߥT5nd!u'Sqyܒݐ$e*—o}K 8<0Jbϟij/b+$]NJhJYp\AVTkVS@f fs`!A5h)5scY~lQf!ؕA޾|[U{# M#p6!9 J͠1m?T`⛭fgqg'6>="LʼnT}Vڌ;:7ptrK'3l@d&h2we(c ^J-FiX_ ]Kꦦ~'6@ef-M ;8#H\7|yWnT]j棰eσ%5mk{3NI_tS^"+/XI~UNhriqy5U+on h;0Ăy膰)(p>$s=ίUȭFfX !0bG(D@<8[Fj{R\RxzFS2Z~hk»[»*o<46M!X~B&S \Di|uuC *u6E|wBQë́flٛ.1Z*twѴk+YS ŏkNj3y27]=\k=r/JEv$h^k\fTn$+n[Ӿ4k2uєKyG+;6ק$?~HFE̾* nKۘdjNJSh@Nk,ñGTsZEozS 1PvNj^-]:"Iq;$B' q7q8*n(gk* ֶ{ǂK$=@emd.%7LJi:NXҽ 4 8!*ϚDO.%,ҠyD믞^;d8k4Haƛ gITRNwJobvMA8ww2:WW%?0)b_z4ut}<NvkrNtx; -Ưp4)i%#c21t:X@[HfoТ*WSGMe}rm /~n7$R6ڄ*y h?Yo+?b<炷¨9Y}+߼&VBg_}tD[t|RL ^˷UA; X/cg87h:^Ƨҍ1j5e”}?G_.{t]Yu?{5(~mѶg:S- %&.t=&xlE͡B#pۗ,p_XIwwUgeo{ll;~qCaNpd^-pmO1JFMjRh=p@qR䡂qcݳnQ˯j cU;ϿVPz{oQ"4wL*| :OlaMNXVGA )U~;чUr:ʈg.+GEoYkʵDˁ:K+Ozendstream endobj 246 0 obj << /Filter /FlateDecode /Length 5351 >> stream x]r7g~E^{F "a=c1㹈(n4%Ύ`?{3Q@!Bu G%t%_W_;vI(]/'.^3|7¨ [c;y>l93Fs~阗u.lT_}~~jr6[?n$ jXFH&?73qG "~tU|q#O9*(UY#HK2# 0:$vj[%3Vwsusiҗս$\F}Ϡ/5S\iΜWazMe F[&uAM~5riPIJQ}(N$7LthDol66B(YEL gG1mա.MYvТ> j_P>b yYqAkf mn| UM:rTlQAZ_8Nt֣8 c?+^M.BJfo!F,dt<$@p٤e۵f#$J5N(:jAmUK#)^ %5# k \M4a$'*,!uBrҁAW!I}'1.u[wt >\WRZ|REO% '2oVãSJ#>JceE\[gj ^9&JjzCԯ%':ّė3ȍ>)z9-Ud颂:kZZ7!Ch'ק ;A(koH/Nayti^(SH<*zhRz$~ FΫ5:e+g\4Ahb]3dYA2p{CZC9uA&X- 0 *WVXDdeӀ_1smge݉vT]]˝lHiE:ۛUR X95 `V*~`zP`~:xKXhd*SAE= `W[*=몌` 6s-60d|UwNPyI"6r f/>U5 qP%`M-[EvRg2+05,5ӀZ2j`t측_0Iꟳ~~{K}.Ǐ`yr率?BU/&0cR.f#[YD] vgYB(>A@&3W,aJI"0)qQ"Y^Kz):T,' qГh:Lzlb3]sZ-ƌn| uAU(D'f 34l>}4"XUsAzb:pLA5s]Y;H`": b8̠2zSܩo:%]84=zzQd 5y^ƳG5}ӅzNMeU~{ A|ژqh9p'_=W?W}ßX7WHJh @- 4^$ЙZ+k೬sZ8Cqx$JÝyP$0t '=T47.ϸȥɥ0áЍi좠2J|\HV([K;t0m44Os=7n,{AgZ0JV ] '?m|@q66aS$}3>Gz93S2n8&? 
_sUZJ4xKg~]Fs@C9ڊ~l5Âz X $5hhff 0q1c`T^ \YwdOr{~K[9OfnocBykv O*qғymCt'NGژu0_߈Z pM/ /'Tl 3[к V IǿԏI}t Z=Ql$) wY*(kX2X43Zhf^вyq!~ހҔQYnj>@ȼhyJryE &%CZtvQv6>3 rPW>eۋs̅~I޾{FZvncЫ!N-V +=L}y~Ttigg68PBQ(EvT0nTYa+-1Xb | Al.fG vIy#m˻<>;%KۃXB#%/b^#=MRvXoj(3uL}g:fRߵ(>8e~ltxcBt&As{b]< pu r١upw{"3p\CE1 s aZIq}h@x.p0Qːh=g]X1ua`N+4nSdVCM`0n.>C օAC}b'Sy&iϼy5 onv(@2m${y %Qx,n`& =& A"-Ϭؕr<څ1NN3Z-޻q^sΰȰ:Q68ůKpffFV7ǃn| /= Mͬ]EXĞ+haV1eT6kDj|8, B~pV *!}»J.':Q&1GH%urծpm rS!wwҳϼeE=QX= 9 ٗB$y=gCnܓ*%)g^K80GUXlS1ju CY7!Ĭ$DE׌^Za")RS#bXJkl).$/ p\ed)0Ai>f0ͼ`$[F?6 LLZTK=h$ ȅ@%eD/dn34C+eaFTL)JsCp%ڳrOyM ١ֵ ^׺ C9Ȯ :ε$i>A0^%;'@B>#s(#! BQ#nfFdҸnlUMD4 w渐9oa OQ5܉ԝ osgZ6έp̲ (kR#4 \N#֌yQ} N~lsGVJGVt֣Ҧ獒* GyPAY TPT4u3/hEiriq`oy#;q-=vSD豿%!b?u5Y L7|ż iZ61 C9 R#_.'7D:>x1>>ͫGapنRs&㲩w3pR:8`ʔzNI&Hv YKtgZ3sqͮȪ+C*De32WfkjZ;<ʚa^-/eZ.?lp{p\1UikXY]dؒ|C!Gb.SA$^;ffǻ\HL7mMYn-#rFCvNEX[5]ߘ:FݘJoi{}z,u74ٲڤ޾⺕bd53erEsw[]ٹt%f 9Y! ֻ m|4d=Js p[0ۂӭ^ ´ծY WR7S}bu\L E nglo8Gc^ه9#z"'Na.T`Kz%L۹6n9iVNvٯE$v+w;5nЅ,_?6ڶ<En7oi[Zኗ*ѵ˻Yq|s~Wᙺ.QK ?ϥ$٥ָ:{#\7<ϺIMZ:ol`A2ZMCx%ս6ǫ+|i2^+S$ J|mDMN,!Є|-gQ }VLxh柇ɩ4x-܁@o/Gidw˅g1< aH8endstream endobj 247 0 obj << /Filter /FlateDecode /Length 4198 >> stream x\[sGy [2Ky}*$p A|x-9dpQwBQTIkϥϭ|?.Yϗ =yx]|K|q#xF-^,xzߊg0fnެxr%zj{缗]ܜQgpj-뭴+7N ּ7Fsc9;*zgaVz唥ߟ 4{Z|tZ.ʧus(0esEdWȊd)nm*-*IMj `́ߖ  fӣ/`e8Y@GZI' GBu|:$A\JАt[xZnб㜃 Fw4 >Eo2( C$@\wWc u%JJ\+H;PG)ƒ1ƿ.,Z:(rH :[|I` `( F"+FJ>$ p]^B+!a-33X`~Y9+1u/ SȬVcG_ %}t@ O' n0sKDw7K3%t@Wv#\I5qR&-S/'2CI'B -yp>mɴgw_B/dy,lyt 4}Km/ MdR D +YbL:Eɨo]C;&\}<^q|QG  K A%@VWNߊfDL7$&$ 6&ڀs= @{rRYK4K0oB^^^\=_n珗HXk1:|0Nke:Lh8 _TPanɸNnOeq4|띬^x0A}> 8r%X¼X|t ppHhIke%ړ\Q4$Z GƒT[>ܚbEswr{`i)"&_~UFl!nڽK$7-VN$8|j5c 9t߯U?Z);+-GC0{0An)Iu3%(A^|_#w!S|Q%qcw[<ݭdxoW 5 ;akZVAklK7~'$` NY ^c8l*xΗuń]w}wJ{S7aNII o2]2첨=Ʋz_^F}5Qo%w'ѫ1_t%jEq؈Bv+|[r+$:54V(HfGV(UP[! 
DLtg5#eٵ-s \Z: /"N00`#B0!vsa )!cvBJ {exr,}Pb̷mo+',ظ86S[#Bx1|AwtX.(IluF ?<Z(WU)O";[,fOpگPݳhށ(^27A"u Yvs;Ѣ ['%>I ,>ȡSWnb0OOwS8ZvSJk%hv%J_$/JCy$[)::Lvj c=@l,V)%p\$̳*$N~3 6|n#fo]xx@K( 4MΗ% 硡Xs-ymS*|Ħe7 @4ٔ ۣ#ZXX.Sɿ:)_%; #r@ F𺌒u覞u 67t5Sr*5Ci*gQ "~r4"d/-Z8"RanOZPHae1oͱPM` 9 Ys"=a2/:/DHB^QK%B$=/MqyD(>طB=˨_/]r}y"/O OI;˹у>[I͝pX­rgsd>။8nX㑛2d@7][А~z<#1U#kQYbᖐŖf8-i;Lm) 5C Gvˍ)fy$'*]ݑU#ׁ4Nt}acL$* MJMy,$́yCI*/YOeX-kk mt]5]uFnQ7"V-[HU[6ˣS&'+sy>rbMK*CmI[RCʂAs4'y=lx 7r >AU~BR`{tp1DzgźtmE˹9i9g 0/h;ukݎ;. 1-E=SkmEEgv"1(pff!"^ cͯ6}7_5v"j:ȽYwM>d%͡Fr/.u˜&NZay\4t`Ukfi,"$FTr%FP Ƣ[ln&WDٹZͯ-Qj<"l~X`uyN9|;V3(&=XR?W=+*U.eb-GE_!rpW $μ,vNp+8q굔tzNΗ> stream xWyXSWڿ1z]oJ{Q+ZXmmuڢP7DwBB6 {NB@XJ m-Z8_ۙ\83Nν}#b&<Zycbⲥ܃< |Jo=|eScB^: ^ uɍk"87;G0Ѹe+W>XK{0SZ9UY*mfJq ˑHD>\._ZXD(GJrfdeqE-qw[raH*mfdbpX\"yIQ&OMSgdZv9]e1xxxH ۈD"xxXDl'v5.b-XO,%6ˈē&b3@3,"伇xr'\';&)3Q.f"onM&'&gO)7ʧ-vtz ;>C|I ,D}i {O*KIEUAaTܕ;U|簶LjQeKe0;]W&ֶ)C[+ձemUZPERѠ.g*U3|VQJA// T%-WF1 Zkpd:8}7mb8};h#St6)y~.& B ,ݙC ?E0w&gBVr2Ů@,T_cݡZ9݌c{ {5C'p>oJ ^e|/ӸUr*SH;01BqhH>wv7J+]c qH4 ;Tḁ! 4ah 0/p& |тh:1?<% 5: J-a{kë[ABٟLb17dB `1"+Rϭzs9r;@Prch~TI]qP~e~ù 4sЀD ji U9Ç[h88 >oLʪ3nТy/ `%>3G_<*IUE& -,G*8m82^SBȒJ 9 E` w?54T+@݉R()#zDoBz:]FCf[LPp=xF;FY.Bه^4H@l.U{(ĺ{@-PgBK-V<|[ӾajM26s2 V ex3vI?8ڃ>ढ j1UJmSc6`28-L_80M*J9WGSR@7WMԉmGS 4V35їUZ-R6kb;H#q3vx_ssp7m kkٚ.Xn&o5ۤA3SkuYA+*KtRCT Q?9;wзw@aOϛI0BPF41`71u F(R$- P;-5f `?xKWUJ2qfaMRq1vTQLUVy\.DAd7\l54k=MaP{Bc*UmJnmuֿ~Oe FJpG<]mv'M]p\>9F|H|,uIB|j, _⌻v b)]]fyOMu}7XW; ٴ{LuT4 a8+̺ 1@P_>n;قOJu@+Q Tcpg-w%J:PBvYh4 =؄pW} _ڤ,&F +wO&G+3ğdL1aFK୺~ozj.f܎h&N~6H 3{@M}%=fkbox\S5tY^Fq=˩50eW/B=޾-)iB g/~ƏhS߬ė_ sj7]j>;s~mQFʋBVCi5%X^&"<̂] yz8j6iC<|wAVe (bDZ p Ɵ9aPH}"m[0O0';  դ5DV*osO!:]FTh#U+U`(bRC!O7On6> ?5#A\$=˘H*z`co'FYrN}; 'wߙR&BlT5Zܚ:\`}pMоIIyQ}mv5T3m(4UZcp:̪ߎi?+ida HJ7{ LuVJ]j+^!ʬVYSYQ<8> ٓKq3gT-Ĩ-Y?ƣI thۿxbVmb 2}xUS l(4\;̇|XI+V OD!i,~ VB'YeR={Ѥ-KHpnGGu[.3F&Vh:r R#i"/=ALmn/2)0壊'¥s;tV[o@VhJ*g^@rRUK2UlBν/u-H[{{sM$xi;P Ƀ; 8cU~y^^ibvX{} XNN/pgD/H$9 l'Y)Zԯilit;0V?/ooF_UEgp&|i@-Y9YE-pOYm "x0|{t1rÊ[ qk*]vzʯn)#s-*:W] &[ 瞦ᴄkBIѾo. 
0z8 'xbMwO9]];6ʠ MBKEJTzGn/,SQms wz rzy= Wד912yd 39F!:)R;u*A/P zendstream endobj 249 0 obj << /Filter /FlateDecode /Length 4960 >> stream x\YsV~Ė+)qH%q┕; "ej$pn \ƕh Π@_l[K^\o>nno|?Xzgx𭟼Fm|}+n~?LLO3v8:),yzg;sxQk_Qg]NލS9%ٟ{xnN݇Ԍl^LZ{w/*/Ny:y<3>#}sĆ9_9 6)vݟ2|8 E=))`OԇgH^rˢi^&||?V~gBKBOLXMZ#KzUzYɦ#034p,#r(=Սxɟ7R(8Z' 7a&D9;MN9>!U)Q 0;\&q8Jf%UxjR^mG PN0K'}'"i4~ G2n=Yڊ3&L H.N%'Z:EwMN|ƣIPDH q {ϭ"ܭ+WZ(\hwաWJ'r0E=*oQ2pM\%4Aa^xRruv\w(-;T57<Y()kKs4 5(Avț/*_sɩzD@$[`Upe9jT5}6!-nqp`H/_MYԾ}zw,ށ.ejYܜ"04F̯;}-;ĆPO̼#w_Q=ͱ¹}@Vl=8K/qx04lt4mJ*"-&-\xqЊreRGG_U+P9nEف,}^6. /To os?Y`u.aAx7Jd}4D^BEY>Qŗqcy|`?I3op&>)#;&ʘ7C::t)_'"D6 Ջ(ez .ӤWh6Ze`tqv`~k D6Ȏfb Kס` FI:?7dž`0ܒ5X`-[1'v;uT|7 9˚nE˕~{!!$}7)SїuPG&M4ȓw'|gއ*FIasUZƟ-V |@az %~4)sD뼧:F^+vVџur q^)p2*E:V#4@@.;k`0DlT,uM5v$3Oc\]%=m5-Xb~|*(:[1y鄬 '{bUsq=0 ȋqt!nESs?Ĉ109j_mv=_ YQA H(RȪxc?w] 2>Y`r}%q} ”`8_|gv8bކV EȀ֤Tq {..y'z;ϔ^Z^}t n O}͓_? \g :ÜZfB+;N_3Q &R`ja;apCY#z;8*;a"UOL]' g$ٰ⩓4i I zr"}UK}Hߘ*Y0b˖S{6}CN^˘B7)WȣFL4:#&ׄg1 fJ} S%  Ze,md^571]LBeO S$~C)4-vKl|\Wd`hF%P)!|StNBFc&Hhw wڗQY Z#@Nq辸9q֞}c7WJcW?m?$HVz<N%x@">/}zǭcv2g0+pvy_ZZ]Qرkcj#RО_ҎyδGӨ:l€+CH9 |Ҁ}(#L1$Q$V%/9_ 'z&ufo"JG9NR@ ]N"2RP6j#hx0D0-矏#!_ @T}\B|Ji֝)aW M:f>J΅ ${SgBO[˺bTDϬia#uƷB mNJ{^%')]C%a:ߦȶ-1P%$X8|8KX]cln.`qL4jzNJ1$i;c'&.τAowuɧ"'H'GZZʢUU'«d.VtFA]{QOA,k::O $,KX4 )v9#(&E܋RmMV"V:quU'ndn8vt<"uCfT#_x;o6oU6(czg7>ݛ7~Oq珷HF*-.Zk\iW`gph10ҢQ%&,>x DKPBI_OJw!亣~H,}ӷЄ ɩQzs4Z+A6 hJJH`Jߜz9JVRFc2rNJAw$1/Z Ŵ$V4asV=A z7q/㻑+A o=w( XI ڧ?^ 8s8]yĻy0XzkQ})d`_aȏ 7YCՉnHDՀdL}CC|ڄ[T&ڊ.U=b aD(#B~͡ A}.pr:1k(ov̈ÈFut'q טMx=hռH!B7]'m/ Xg(g#x7ėko[!9&EYe4M}>-Sc2|*!fi6% VWh>ycAl}CN5SE2;3ߌ$" /N I5 r]G?it`$2Vvt y-' SDJsUad?9]5 &.7H.RҢyqV5ӱGbx;_o#&~E ' Eu7^FVφ).t龯)ͨM7."bXĔUeKBk{en-)&Èd"HxBc`Z S5Q0mD^Ŧvhމ\JAl_-nuy!"$WGg#qsC:wPy^Iq5' ePF?N$yX=W݇arVI{4E5QWkV;HOgn)S7t>{dW SlSP>jrZ`?G2wFřGR]faL:R"a=]mr"{14B#2 @'W7ݲԿS$ zS?B Mnɏ}įU~k;U5 oE[5S5dF*!IGz"qHc'[cHT}z3c܄ݽ޶Kl MPMvcZy;^^yL RW㬿9{+@{sQ+_p5*Pߨ@(ը@iwyFJA(Iea(ŇiTXoTX(jThle)9ۅIp #Qˈ)M!?Ws:: fccQ$힦lZ}h;rQQ;m;bMɫYFMJՒњ ?fT'MDyGN3 L⛁hl¼rA\zMfendstream endobj 250 0 obj << /Filter 
/FlateDecode /Length 6048 >> stream x=rǑ7Lifh}xkBm~@9(7ά* c "fMwYy_qa__˟o^x_XzgSx_Fm~Vl,g 7glsvQVm_%Wbf\,zoQ.XC_x^ l^Xo_ ? L-0nwnt 8Z:xzab]\/y/aNmWu•sw˘NiZM!Sgr07;8ۦŜ͛\#w0&n[ZE&i?v/+5x+#kgg+ 0`ˣ a <^ `mH.p0X4Cs䲾imo/ۀKpӕb kۋe3J>W)y[Ӈ;RRF@+Lgvi|TMϛ"HA@@hp̷ܲ9_vbK3Re=}}|__xQFG0 @z7')$Ge`<9oL}KRށLSE 0)%) ӥ׋>a ))4 uR(RtTuQIut4BS"u^њ.xHt-\[BWTheZgod˘g&k2"aw Fo^DJup:0?kH&Eh"JJ {H/X.\bS`7(Yp+NEnTƕ5 9[ jAU/Vt2/P,m꼍h-d\}}R^U1Qs2ҀfҍkMpY394avzU$Fx^]Ь%@&K]=`&$=/*H(#((Tnf1 /:F/yE s@Nqzs=.jyT$L 1!2T$9(r lp8Cs{͵*gru2: ජEZ8)5` 0&޷0nQRO8lWA$Lh'x*i~ =kMjnG~m5q-XtFh'ߎ~eܚ- F%wpSun&^Gz7z\sT l^8QPkU% \T_$ A^HW'>"XAOhє+'?"X9◕ᴌ!0 {4\[J>)#w_ؗui…S2zI\"[h{{up (t ;?ACVpox|?΂Mz@E=R΋qş[Iyid-'+/_Tx@zȏF 3aPߟCմEˑ]r".tߏN=weҡgqYKViV$Ku%I.O I3mcbq!gaA!#Ϛ<#k~oB: ]n JNfr43'پ}8@KAIAreZzxQty=IW:Q@O'7!7v\ 5qt#I}+ .*FOWEZiP0I߯vc̢C`ɲ K6j@ ESU塮sMyDSυɳZwd-&40E}ifoLqD1q4( $u"h/7uItnո0ݻh|绺8!A4Iu K aأK%83݄(B$tx ؄@َ/fc6Kȗ(s9fՋ)Q2IQJԦIIMVS 'a|4<&*٦#܍cdcԇ`lf!7B+8沝( ޯ2Cq0PH!s&J+u t1avomaLߗPiS!Sn挧{j.kH> <+ِe 5ʴlR 'R3 Z05Y1c<䦈K̍U7RA5;8#y3̘EеYMZv~(6嗀Pp,@SxS1@S{8D-J٦U'׫M|Xb̄Q9q$hQNr4b2,v.LEۚBv4]P\?YsSsR&Ll?sR\o p/yfU])kwoqBu0-W PpR3TZt15zLM^rᮽB*}ħ^Ӫ!ʐBblX,&Lŝl) ҭ um )I9rIV.Z*sa/sTm$t?A2"Pfu;௥:,9cC:-ycX ywowft(Ѭ <*9JL%l KfvS ۿ8[ 3~v >y FE}oP]-/f+,6dIdu rɀ7Sv@TaffQl(FEv.3(2ե3K/(˵ܖ'dAr=L^wy(бԶA]PO"-J ˧D*U.ItL|L!)WNEM+ƽ/^@gHєs"WHdv,Ѧ4$J:Q;׷H+CˠjQUSM#ZsU卶0ފh\5ILe:+Z gwD+ DEF1EŁתd#b7_F<"֋v3Q B`]uD""ؼءy$K-[]E$H نO\TVJ V|2yquU<|OW;QtLf&`bZiպB-q"TY"Т~O bi QTw*QT *@ )DH0"p1[u!pD1" T^G m+b`s˜⠤ [E<EUeP@[ Ѹvp1Ik|䍺8aVoe/~P@0 j+DArUv TӊkZxMï5`BxRd?|8YR嗦4~²lob[Pf#:ӥթn0ܯg߃FhpY djS5Bwb9 <':D!n:J,)5eT7 d?! 
nr8!g9!NXNl={PZhg%M/ȯ7~shk⨈acZ#f'OW ҢLS\bVa"3k= :fmEվ֫W;u^D$K>+K&03/[I]fVԨhC 4GNbh~*֠s5}U76ċuTz @~:!wLе'|ʕ\0Aj(c(^px?ʡR/EH8>J|4xhg6Jb2Q J9u;dGnsDZňZ֝ Llr@>"y Ian|by &NVGIZ'ޞ[8Od[cFKU[qx{.袳xBƉIRMR< iYDA0@;6J srѯ>&]G7Eא%v$fm{Ý0aFLđ ܯ؅ öaK>|&G݈n:1%3/&5`3a${OYa@iV {qٓǪZ.:ͨgeK˯g 7͖Ng+KzήT78VpU>o4/ؖEI[l{:}$ǻRDcF% @w򻺑^&]:a 4iEH|3?91<0 lnj4_A4Ȧ _,skt+#dB[hȵ JC#^?*zч}v z=> stream xZoFV*>Z*rI $iOUή}Y{M =_ !zfv7VP{r:{l7'8m*/L>T1e m0LS­'0;8*./擽scA ^-h#Bcu@'XDey,ո&{Ɵ5v&$no&$HP)/Ed _ 2%F֣QVOٱ0P#٘51;Oߪ@AmmFy}JՁǣ?*oendstream endobj 252 0 obj << /Filter /FlateDecode /Length 2879 >> stream xZ[ AÜ"Gbq"q胝ٞx]qfDhֻnHILto6决`3ū͏v:~Id?<|mwut< {?Ÿ~yQZ/F}7Ъ.?&. .:- ;xy?4 0kqauߗwe"W)Yʹ򇃯7;\64H?Vl~O !"ߚ68.WX2TAYuSYge>(_TAuFY(哋&ӇM4מ 4sFF[*=Wyc-5;i0 {ۦ.cxqevlZC\{vZwƝ4}?0/̘`Ez ?ǥYkcVEBlRe'2%RV1;J87l@( $|5@H9*(G[44,-'Z~YfA^AW*OFCcXd2 Ki]XƁi_Y tp'%7AڊTRoėe Zr_'gVf_6+d2[`DK%Wy@B$5}h[n7JNZ wj\`fႤ8kpG= wX/xbjcϻrzxBgsDs[$=qNz\;n߹nWoI0wY ŧY! ~wR0yѝN[ޡgRȆ߈լ>8;u9 mC7ܚ' gv^ %~ԟ5O:M?v@qs4+~[L ^t䞵چq*$'kTw׻)OY<'CkҬT|gkpﳽ!WB;ww>T_@~yk?/?qgr _kL?;D]4uk5T׵r/Gm;cb]E"JqVJ_Ka,2:Hp%WU.K͜aLt‘GeWґ!e+ZW%AXR XD.Ֆk A{RnVT2`UH4FBG6>HD!}vl"[n"{̸N@ORĪ[,=pH=b Pc?{иEkK>2*%w\7zWi9n l9&G/ SƷNT)"VTh^!Ylw-O5hz۩`+7bܨ*n = '%5EVTjǮKh{Z0L( UsL ºzz6ܲjz˺9zWX%$cW7=NBʧ ;&-~R$Y3~Z7qlRQô1 M8X^P‚`x ي9(OQFc*;"` <21E)bb:NA*N+1=ޱ2)M/0,EEA L{COWb f6f֮G}@ dzR)ݳY!Fex+ 3 8>w-"͘,L[ƞoQub Lj<^r.Cd`~#X[4xHfxλxq`m n A#bd \.S = Zq3#Kзt' )lՂ&bQx~_8endstream endobj 253 0 obj << /Filter /FlateDecode /Length 4143 >> stream x]s 7>u|k~d҇MN(%r<ɒwOD4K ?_עk?\ޯ]޵\u%W'EY<[y^N@v! 
k6vByi:tљ`Zt^ͳm!Fy7nNt.FVtQfxTg^uZxXN{ :-lowsJO1Qxտe^lzi o\4FUN;˭&; &>2f(uI 2Vu։`ym`Ja=!5't r23 F`&8\s !}prw$@rN%krz Ma.I9'^QLɆN;QC :RT#a}8aAmw ҉ȝg'Mg2؁j0>R.erk _ FٿIz&AFٸ{/3@L*Q/I>Y}_ƛvRh  ~x\9jnͻZ_ǝhc7i CSМW/; yJ=5R<5}ݗ}+Rc5j<6EzJh_7qT,U}bOXz[3o i(W㜏.Ǟkbψgi̷n:as[5•*[M m >>".nPRM7Zw9ECt /܉`OeN#פ4oW-EXr͍)J;zOwOozeuև&(>=ܢqAh&H1wJ 6}YI` |Da0''߬N4}c_Gwc#XlFmcΫ1k_B󸵸>aAW}R`|׷-^;"'w{0yOn]^SNZbB%h`=Ť|3M쎿qZQC Cb!0UI?]0Yalǭ406 Q!<U®vseVp1!cET6(pBHgdh&4H\D6s 9Péu0w )gtVND$p!UA=f%2fJ$|g2" <)ѡ cwIh淣WhFO4lΖVp+xOuu\KW,WfO0`ã]ل=`|p@Њg$iKw"^^:msf!Bj x5^dAb;I4#/]vKJls: Җ[F>.ֆepMuQLw?|JNϖ=O, OJN=5$|0^ʄnЙzZ ],[}1띙*Vv#űP;"VF0Ӡ*ulhumД^352fJBEx$"%L l":g}GT' vςp<Վe;G1g9fx'➠Q `cI6)(4 ;i;#MF BXr N:(8RC,l-{m,L9o0-5(afb^z!mp!扙%eƇOk(\Gx3\zEu..cD k_3|,?i'(ǴȈs_iͻ؂x2C&РR$XwgyMm'.Y6;)oz0B9hQlZIp"eWH95X S7 1i|<=8tOf|"Gf"-wZ ,c(mM-DhաOdÖ́]~XuEdH.f'Zcȱ7)󋋔- Q3rf8kFz,w6Գ,qD ]%`ٽ7BWM˜"*DW)9IHJWt֩deH@I`T%&A]d]=Bsl!$ c`/hƴ<{f) ȅDgnhESa״r H'Q?]|X2SWh4lh0|c*M8z0xiDV*Oa ^b=X|zbJ0MG&1`n| H0R Jcg Y'x))^!OH.KQ1ne|#Ua)Ps^G3at}e;% A7P~0XϡL>cUvpriG7vp7H3a/N&?6ˌ۟ ZjahUzK4ߚf8Ʌ a[YKno:XXb[O]%)32oW3ja ܱ6VlKYk"n59d0P ضoxf}NĘ Y;ʛ%wgtܙ<'Z.^;ow?^g yZZ sW1GN5{K׃C^pY:+C )hIuL .xN7ȠZyOX>)8M,ps9fq8qTK8+?7096Np9 M/0mxz!4aªs,:3u` ?֮\+#ޔb#{ǯf"&}C^ =\3ֻdVӳ5%y(e>D9g!uI̅ $1)n"<29Y1JpF;(`,3/ L25~F^f0qEJXxeE}. ̷k O_M pJ k?6Ha+@]F_=0qSԷ K lJǻOLKLS<3*^o?CU&Mendstream endobj 254 0 obj << /Filter /FlateDecode /Length 3574 >> stream x[[oT(?L~Y%/H&ahccelP>7BG~cK?N~ݛ 6x?/,~UaUAtrpO}|xk=#Og@F*-P} tҏphdaTwpΊ230.+5xǸ~yt`ԀQ%ђ ^{onO͉AI*Iu+쀺wD~>תPF?ɟsOYzя)I)C1c8r{B T$OyϋΛ|JE8^;{V߁փE9-"=wtlpYw$Լ]F?Q2~MBpE8; !`&4ʉ)^,[4w"!χDϋޝ?zr+6PIՄb:g2zzA۳p\UnYQ\YV$ÔE|\4ݬѓV^Z5| `dII8ƍQ,گCʣ H<)R?. 
bStah +|]7Nҩ}tR聁@,821g4\08;#20>tH@z0E!A%٤EhᒻADE_ $hfa|>.9@5Qx\599@%}2l L(A`p{Xz#] 3Ǎh9!ߗḕ¥[}0a 7@ F YBb,s]-k0ro͊DzgetSF3 4u!cUA{/*B$İ\lBJLbJ!&3pU*2f+,c}LҘMMy, ǍpT#!qa/}<#xBkJϧֵR H Z@O\9TYb• M%@vH1տBurQDW)ur]߆|GsW9|en2P uKevaKjpVYҲCTvጾ "n7^ K.9n03͉R$ͰG SأS}CQsUǑGG 8Ƒ7G80WGz78ő-\Q3-1yyւ 8H)oRqAn(tV`;=X!0u#Y!( ̩sF]~PFgflN1pO& A $D #$IKOE|}ĥ-aHWX#M BM`zeLckrep?-bFg1ːqi9 >A`C vL>8@!$ pYyvp&~ZrP.1_@G_AHϷS q^(ڱX*tYc p(4 F!mD[ȨLfd*G0c(Q@2MLIk:heev1GmH*E}8mj(qRY\`M$#G6q=UJ7 R\jMq 37AQ|:nGcDWk`-c7ތԣYEAVi'G4G[[H@L8Xg D;hĸ B"kNˁUSڤRYĭf  ' Z\6j|+PGM2ЉR "`i‘]$D?UQ7P2S/zW2%DU<3VLg 8cRL%wMWfA}hcgDrFi>qO iĚ ZyWQ5,:(I1DH0v8gQb܀Up6"p(GMh2>7-ă0kIx+6~~0߮4[* ~3aB`Lne#n.Pf"fTF98,Yu\XtPZE)`Rs&ݎCM nIn|X,ln$SNZU}mᾋovnJބq8{kL0w)Pn *ϻ84MxLjsUEqI"ܕp[DQ<\B.S FZdM<3Y6Q$Kז+ )kX0+;Qt2"e6 h6XzYߐjL*b`tނelsD_,`Cff^~}].3|oIrEBK&(xЙ3;BTy<&#ȘE_tqB4*pEf #Q1T>[j! >"j}>BNDtYH.EIvE B2ԸlM .0WD) \ Pk  +,R5.7Mêo&7lNނ4,*TPMia#53 Mv|s_fOW H"թ.c!D@p*bb.FCiJ4juɣIXf>iodj{0}G]8J_qH 1e9Zj6Pk9i Djt> REK;N3ԦL-ZgE$YO3VݪִRY|Lm:¦eԄ0dZB$!~fanau) =qFP}`bGv@/M[. 
R4E4 5# TPz|Ȕh(#012Lħ`&zB"|hnX9HGwAl繅z^HZ#h]i#L ewmG֍T] I9ptBhtQϳ fRNJй_p*"U>|tWmg#r[2ej`|E2>$GPd9E_J*>7\CwɲmnfCSoP?|Jwo܀q{Ӷo0">{[JAd,cHm㟫qP@ؾ4itl7%$'{4P6O?ع4qbQJ8(s/vp> stream x][7rDp3@7]fd];>UEYd <>lH $w>s߽z&肷Wg7MfBByĤv壳˳|?Ga1Lqov_> EG9{Y"]y&vOޜbe}b >+=he~m}{Q۩fe}P߾o&B(\_L1Coe_zr=9&ۮW R435=ysˏikƔLn2[Ϟ޾÷HWOPv=imNM@N]}e}<2Nb@TI{.ڻf2] b% )@(wN?S@wͺшfii^Z'!v?.:7c,y)H&rAǸ0NʟWc<5) =PYV5>2y4 e}z7W -(a6yoL3n4M`;]fg `ЀrrHPׅB"ٓaf8~< O<޻LpiGԓg|!?D`h62D?u‡; x*ߕޞ;:+m`ԍxeN`4ݕHrednQtkU2֡K(OF@u B 챟 $ӎv q:6K_}ܓm[GI<`n8XKBlHStNH$]{ UK&1ʁWd 5s:?P0 )~NCI; p`끯smvziC}$XPD `e.5aʃG6.DJjWphnX k43QNv"2O\X#ґ˺/ߜ e͠#sf̙+9x8쫞ou\IpxvVtAL'EkfA#KJT1\.[fa̠瑉!Ve%t9mVdMB(Ɲs~Ό, J^+._}LnJ),-aJ" c‚8?m*!Pvp!vvu2 WqQǯc'ao?oA;/ \m^^+n*#~ [1"}hj"_ИFIea%וu7_sKME,&hmeJ2/ ZmY Y^jpL6L|f,JjOr&%i|iZr-sYDŽ1ǡ6em')!jp[Y1N05fY+ۭK pܤJYd1Ca׵E=pxQcD`pۊjJ[ʌdnv^*{k L)_&sU2F6㦱:53öa,SU@:L8H^ejGdz=3鰗pLwje딒=6n>r{|Q*klؽJKE`>q|$5 :lQDWbUk@7gwW$g&8!zT܋3jdwXx529]Gu^#,YNJ: TPqHiV059̏}.{{uayK6w9L+>_Ui Fej :jɁqto׭>j6)m%ŵsuIʁˑF!+LR9Nr~!%i#=ќzG5g&NGHH1OBie-+KQV;DQrX\Ep.)BD5+9L/;`ѣQHdi}1{J$!M+C#)uXF9=PugFbgQ%!J,Y4y<ɽ +ԖL ϹdD@߅ ]YޮXfs V(nڈmS9̉nNӣz&:5fe% :Wr(j`&D0p<2̑/))@.!sٖq9eȁS) XDLPiu8Mo Jl7cX76K[2XJ U -hl\g.k2dP)+.N6nTI ^ Oskژ1t8rI"/Wg~xzp,9 )-S3>³K6U_\7j{ GFd=Lh4Ra$/ciDP\܇,|]zV 82ô5%Ǒɍd;|Y:N\$Aq6=^N9"33vMZ]Q!n1;d|[ XI3Qk#VEeŷ '$N[;Q &9HA7ś!H-0nKa8Wim_TNhY][95F'w\YhFf7.a %o|I Mk+}:f"G6UQ5ko^jѪqĶL06['((Tœ;\ aEu q#^ȺX5_L'RޫL蓱z bSXhaucgjކ?!ʚ1#t82׹+˭|XjܬtѠyJzP$9l1!APJ;  *a'IJP11n9l sx zT#;orQY݉oa j?oG4qu'鈃0t.`?0nx&5 4 Ux)%Wf,3 [g5fʞ'%H8sYe+-čz:vJJCϺ mV>R"`TSy99*9b.+7u eyy6g Zj}+,s[}avl@\ɐ&\Mg[v-}n/1}$u>z fqH9L$u9LNxcОGIYJ)fY X/ts zfWTx֝_CC HZ1K9VFD$f( ɓRNH:-Z֬N q/??B0^azBd,Yqjf/ΖC^;cӻXfIRX`[/ Kۈ=~;+:7´K.8}sYwc$,%vrE{0Gzn#'ن ,^04 J]J%p,ߧ+,i `uAk''Mi }R!Ly ?i!xFX62J%bNMUb0AnL4k$#7J?NȎS`"ko&ec뒿ߺ4hjGx=ýaȣny[X%C&󐎮]rtO»z9'\,mD L7HǠؗ+*iWܔ"Z A^CYvsV]؍(v2T4縱r?MwHwE0s^`=k R]#*􍳁C.gWڌ x^kS{ gǘ62V7Exb-*X:,1t?GS9MQL9"']""ƍa('\; 옗#N`_`aX1okFx|!4}jŸStM&pgnP+bWpX!N: lt_&1P?R/abiw( M\P; ?9{ ݤPz`ik2}&yab#}zMZvK-&,q9t5ذT$+t)#]" Ё]|G,W&Q[_ 'kSbAxN,8 
P,2ɒZEJE" ToOyjQ~tr!vJGX0tV=Qu`r[t Z^mt%D䦎.]Me[u`XսnTYӹt!+yWܗR*wfE򲩡hsA^-(֝}[ B N|^JHU = I^qTIkgq;^Fendstream endobj 256 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1300 >> stream x%{LSwo)4&޺Eac܂TDJ/Phi),}A{}A_72 EP` q)Cb&t3Ctlew +sNr|`1@ Xw(=m%Ľ+6p -T"!b7eCLBg`jVk굊rtKVizfmҌLUȪy2]9颅RzT] u-{u:MVj^!SPkm&+tԞ'uNzX"ox*MJrR[u0 +/aX&Ve`;4չN0cjcs8{}, "KF_;(0) M|<v98kZ)FCfF0twu/E224P)`kی.Ifa+[ B2a\<~}G/N9 33HTPaWB)β@ouiKG(PC`1㉜ , l?%bW'8m6[͢[V>7t!WL/_Ї_zr[)qt%^ZS(X_;c=pZAVf8l t|^_0j(qt''K_z’ĥ_~r Y!GR(qڈ0~+5oP&a?&>F 7vfZ)Igr8 MLp>C76y˯\=ۉ<&Jf|t|b"TsM6ڨO\Z";݊8s!-\tBN/3}?@Bآh̑ˉtxDċ,JEI כhz!CR^ `~?&9exJW^A# uj8܌zPBޣMeͥy bg jLYD{l +;|"QsnD5^@Xv ccw11|ucuXɱ;MJmq΢Бy#&o\`VYOYbC£Mn `֠|1$O(5>CLAo9{7 FJ#\~<>ra̡Q)GGZH2}=F`endstream endobj 257 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 436 >> stream xcd`ab`ddds4T~H3a!sO nn?䅾G ~"ȘW_PYQ`hii`d``ZXX('gT*hdX뗗%i(gd(((%*&s JKR|SRCLYiֽLJm?~|oauOt/^Ҽ7;%AqRYSf-:Ro2߮]MmfN7Qw5Stsle[^#[ϢFnҎކYWu/_޽gy˻u\1yV™9==}ӻ'u+y Nʾ|<< .endstream endobj 258 0 obj << /Filter /FlateDecode /Length 3610 >> stream x[YoO hhއ<.& ,cId'N*Yf,X9l?9s?>wYǙh90͊y`Jog 8 ~lVR) 64 Z7?,Kݜ-G+,bi׼Y,|^H^,iEh`Mp,oNwV5owX4{B2cӼAz)`ENJ`@z&|I^]at;HIb)YpH0ZsdG )awuV,+5xլdFs`]Xk5U?D͇y c53* idK>`]K4 \,Al+j^уʍl;*dRY{ҕ2@&Ǵ70U ]e [Ye2-F)|ɅXeiB34|fCP$,@ N,II%? 
6̫bə2\+ {fY:R7Ďϳm[Wg su{ 1ԣ o+WRarEy!f% u;C<SqΓ$bd xm&,06BXc6P/#j"&Nr]4ie@l (H.<(at"?5=:P_v"oj> AT\-#lKVYn);ٽ+\H2yȕqUQvQ 9 s>l [9 Y`mص>ǹDhiMiRgo 9&F)_zKJ"^(!N^ɌJY$Nحp+%vZ^R/[tuIpk$yH k0{x /aVV޺-(Uԝ j-f_́hC-SBpa2i`=+[īme^>1޹*H[08>Ij@4pH2_ȑ LQh(n3ܤ^Q3uPf ; |γZR;pG#Fd9)_U")MMt@"sM^QHQvib{ʤk672BU}.g,'ytW*>z#nH<͵CFv*)D{yXEkAd=+NVrݮG;e  %O⍱ hǝMC!W5IP2W?Eed~0B"FEC^jEZh63t \Nts0iX٭]E278’¸|Eb.zI Dg)h&=1-ՏVG2B=gy4WA+?;.~3.7 E|)SuOWJ3>Ȥnie nGj:^ TAza!W)k.(Zwfÿdzst[;"q)/?ϯ.>=y.f=~GO̾{2G2NArMq!l4uBMܱ%@a2\.ǫNe_0Fs{r6#Ť5Ch ٺvn8e}YcPh$7XL eU38[pN],V=!k$x2[nP4 VUmNADm#SJ~_dza_v(\jyJr?άk:0,^8(4"fKv>MWjgt9Oż*& N3'_ivxI$wLHϔQI"0.Unt6u-HM^i!iv-WHOȴLxG_\K͡reFsVuK!v![paM_ e\S?}2}cu[yU銐GN\zz66wqM4@`ϑޛ#> खKV|ăNQt)7խcrU2iq^$_ @CZkuy34uoelR=ly <$]sJ1WK"+?.vˇAeTǣQKagP`^o[> stream xnϋ~Owh $mڈU$.&+K4A9$g W]"ßK'/ .^/~^oW˵ KbK'zpO-<{9]ƭpmYaTWЮ88+(4 *jU&=eXz\އ0ڼɘF9!hi|55l8x?W4{Z+1: :WX5в ̇m@ICLEq^C G!PҊ[hۀol$$TMwqnS ɷ.p}4-;v(^#{F1"fq~@Kx-c/ln3e&z7V rXf-t@5cpv:"J>Iz$&fGgN)Nf!>$.Y#W)<rR^pxX1Vz8ɪ=kcj2mDx$Qvc3?Y0cN~RZֱ(ڇDSHiRkd9- :bo7,Mf$ڈ^<)Fȍ)@| /`d_\Y ,T(nI|Jfw/+҂`sC |EjP{UJ3BY2-}uþ:M Rf !>KB Fo"! | `S" ~P9GbmVih@Z0Gy47uoĻ)$ =K4d坨0BYQL ?w18UcqD4 *ﰥڏG*NjLRp[>1آhzAp~!ݣxᄃ54&]C؞b6ؐ{L'y]4<D:`Es$-x=*̻|;ѐo#`a@]9D:xL>i74R=50XMdH rSnLjMfyɝډިƑK'(/kقK`>rXmy l3!'Ks ~v2)1r]w+YoT>6pinawDW?K:/c%[Nӟzeˤ ,9LfjCo,/Zᆆoix=(LˎK[q$x!#|Vb8=#F9zoK-uǮԄF_#f$v%Uͼ U6MǎLnKyYT-3k=B λZߛgfhP>o>@C&7C;KRuyg2-}[& 3֤=Ou,y)ƛw _mckL8;T-,[7ء}IF#K*:vjj*O} P_iGѓQvzGĖ2We?G_ai6&Md*ͬ9;ۺ/O捧C)xyK Q ׸+_*E~CI{Rh@Aʉ;$ 9I 5bWwG>2Ԋt1#1Ž/ q)5 N/fw3|]~sw~;CFt Ǹ=y>( !fkEvoLW=LWendstream endobj 260 0 obj << /Filter /FlateDecode /Length 4086 >> stream x\Yo?bAnm}CȰGe~@q)r!,9wVH`t]_]3列)O'O^$Nɻh}&VOND{S'80x:y& ń2)3]B|aܨ|!Ypٺ`YP\0RU]lLyonA2:flY\wْ o^1/4s<o)s^. 
dF:޵r)gԿ=n̙N%.!րZW Vt?Ĵ*d 5 8r#B1γ.q&.NV:O3OA>#Q+6 rYΪG[P= vP%t%>M+q 0 ؒF0{7i"íx VS轓K]ȄVG5/i^mG$v hH@o|׺Q5ԭ՚&Ӛ7h -Xޏkgߥїh'n5ncTùLdZedbfNHO!L\amFS) esvF؎ ZQH ffEid`>Ic2:j,s ~Z' ,xS=]T"%d㤺SX |aSn;!Z5 ўZG'uq)+dW2e "3OչD-:-g^*g ƒacuQLk0+5 $079< į s>.%f +C(2u?FhEF#4[=#4n17:^ˈ~Х)j$L~N&hpƚ {h1~5S.2P1g"EFwi&5iF+2v%_ tjA-wTc> #&H:ZVxzμEZUs4ޢj;!e]3!YP1;vqh&kQiRftg mEBggm*̸g,u+k"f__+*LiHۍp3 ;-6`swwKCDqG˪[egTfJ?wEAV08@PӮ]mў H,rg= զTH0-Scl9<<{1yG'?Mgy&_LS@r/ `# <2#U^tJd.I(Ei/ͯ`6T# ,xI$$M[/yY=i9N*IJS)Y nl=Pȟk7Fqr8X7I M+ր{]}U <+')c5<#X'YFmپSc5d/b5s+!62deW4/24{Lc#ҪZL:XF@&!Ȃͪ?؞yIq q:Wv( ϹŌlm/m쳲m.E/37aG֖5Z6h2Pă$˽~(*#14ڞ_Q3?0}Iٿù}Dh4Sb 煰K/-z!v:%{uөe\%%)v}4x&18zVQ@[/sj; m+N:6.~"5iL6<ۓބ?(]1ǘ[ZIJC2Ll,uBAYd=Dz)A"-qjcoA@bh#UM%6rtqe`Zu(TZZWiI! $pfNzY^; ֎k`4 «>Uw2mXg4mȚՐoJ|=ӕaaA'?L%BXV42VRw6l"茻&4N !t 5;R LBcRlk]1@Z39.GVr7vV`c5.;g5^J5iGvB>lQ^RI^=r$diO#BQrkތ!IFS޴#w ty/G:@ ڴndC<K9EYB}{#K[úVvk_IRitdkwʛzמM͌l nl|(0F$JlڌDߺE5[E-/) aԁO^ Ȓ a|\#{Q{ >zf2;n=QOa][#%N(yo\Gpd$ p)<=jrP[>&u#L7 0L:і5x2ŷ܉id A7{g1,Fe?lB߻k>4Fҹ>+f u:uAu)sS@,}s1W0dR- 6< QabO ~FW2pvqKQ hsvs:;!ῼϒքaȋxO[UHs/R'Ix7m)Ы~9nݾ ө  s b.!ςJY y~!Zt?XZQ+Ԅ[,یIy W4t|!F,*+ %:r4%͠-uolmd&UkԪ2BqޑPͧ(MRXsϯՌ kbq?:(~|\TAuns!%%?EqlͬT|l{5}X9 J)Lм2UK6Ctqt   TDK"~\԰1R28sUGɞL&9O+J+7giqO.\*q;5Uk#a/ܚ^;]ۑْrɎ,7R[uP~jDn핐'mUUWP׉doZE;w~2g>dtsлmi Y!8RgUn|ǫҡ.k#e>4~Bendstream endobj 261 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 212 >> stream xcd`ab`dd v 1400q~H3a~'k7s7aB_?``bdr-(-I-ROI-S,H-/000030dg&,\{cO$ZUUY!|gWwOq{ڦnOe~{Є={vO\X9q).k/4{NKQ-endstream endobj 262 0 obj << /Filter /FlateDecode /Length 4410 >> stream x\[o7zTD4/M"N4qU!ɃlI$DzE{gxY\I€Ϛ%sgbO}~[^qwf>y_Dpyrr&=ٓGOzdѷ+ 볯?OOV4J`δ'X/E}Uv*u1LI:7ѿ_ֿֿ֯Ftޣ5 _יY~2;xhƽ ˤ8fd+KP'Kpt{&Μċ#kTJ=3!]m^fx0[r`GZ1TǓx^G⌟kA5S:?wu>6ҽJ?ꔟłoaNcT詸&F(8 B_( hW4+@m&CL{-zC= q.O-|긒pTS)GtYO+"9.>`He5,P!>G+rRJK ($!}+^e $O'71 cٷ̙p*lȋP%3A3@9ˑ-<u@xf <.}4t&[o7c'9%ܽzFxU 1uparѓ$\h%d8hU E`ke`!Kv`@ɬtjM~ SrHfЍiMZ,14rYV`^< k\ `"Z>u&Iy[xD߄2Ui :P`rrRU{,D{~'8u)~ůD8oȳ`څ)Ũ $&sZF%XeP/p~l̂okct-ug. 
vc%g|EdYd%vmpCu3굞\+~5]++ AQZhnVnHfUux8&KUI ż>LoIYbkKJ *؆FcR,s遪R (ɔ"6L:yΝSg3mj]D~x?ð8Ӈ嗄"NtKB;Q~y+=g)>N9Fqv2>9Ox52ʒ0 ;:O-^YQx$iV368Z𔌷4)ޟN`R4roV$n:*)aVX !ٷBT,RP[1a"!d0}E})䣧&c L bzs+NJw鎆 bCA|V[OVad̤6?9ɬ3x ]o ^KD}a9#!amP>xjGz`A:ION||xp)H,xϜP)ۖܲl&Y ?L9r$i%r į!u|wpy^ (k@[wB!:쟇^lL9V!g V^^UM. eDp7m$$O& %DQ9:4!|wj];d7M/5-&Kv(FH$ֽOB" U!2 휶ɝB-wfNn'Ŷvpq'8.\U;^-#/@CHa1bZ&&JeJ!Bkn~ɏns_PBcZ` zE,oRk lޤ/+Fpׁ71c*bDs#P)RYT0JKr?:]= [mm0a9d 1۶R6SuTRSKPnnFkÚջk"N(kz 3<{|fq5Vwe܅yeB]?żj)`IU\hkU*}5ǔ>:cXѫr:H4UmeaVaϧK^}lK \WPL"H41_r*TtV2,B\o۫LL*{ܨ[->})ՃhOהaU+I+Swv_C'j(V{ړl  ?>"8 q%}܄H ^uT&:ǗB'pNI֝͡a}{ůR;bEbЫTRByzR|i-Rn)h9/ZǪ믳5HoCRri KLjfWa 57bKSr[4Z kҽqøYl{1AL;]b8;c@=gkk,gcW ::c> stream x\[sTT~O_6H*%N\;6,Pt\ USuu'5?-9Kw ΂,˿o_.~Zj[˵ \1>0NXΤa<^{{ZŽGy/1J@%-D's||pZ zybѩ.N.Vk͂18CVϒ=^N:&W`JH6'z]e4ya!(~-!B.'37Yu Xojuw%&%4=w;,ZDoG=( d`:Ij&fS`BuCSqⷂiM%ItRfF 4n eDqM')h(ݣ|: 9^^ZoFqCʄkRiOQ#m(tw\fɐM2~E#ux @얒a'ur \nZ6grT^ `Acze喾 8JO&o=1dYqn}汨A{ a&'@lcdK['[Rw1+Bɶr˚MuR:*,:}$1jw}xSK1ut_|ĒckueNj1b<3^q3lˈre$s#B#,.0ЦJ{:j-';+@L{-6LeJʃ ;c,.ŗD<䜣I @u+GDo9w%:ѱ,/ MwQX`, X_V0g^x&9Wהn$5y5X,.﨟wգ 0iU~}UgR, 38AdrIn TQL~ D  rTpQ(,h[k4'YvBMo lJ*KHDZ{[{#(`[؟WR0!،R4MlG|?lT9pvY'A,$@@1Q}(rH3T~Z_W( O7 pzb% ֝s =uó(d xrN)A59T3y٠;ke4=QdP"td0;SuN"MkճDe㤜]-$geOф gL Ϊ0,f2BJ?g6J/s1\}$t a?H } ˰ok` )6q;y> ĔEa+)U*V0CgCB_ؤfx$1AtFURZJ7 Tdڧz4J`p@zbZ+1' u̼g`n3b1OSLln$m@;yk[(\7w3G 3iRްȏj1/%R-D l}K # 0V`w&Q+^kC@Y*ydѕ]cv3ߑݗ }j}l Pk5VMM0b{5(2VE+so՞r}d;{ZL#f'p,oA9Ud_5Tm1j̩2͋!! "e(zf[gVIֳ?Z`{4d#N4gendstream endobj 264 0 obj << /Filter /FlateDecode /Length 1765 >> stream xX[o[EGś_x#PT@iS'Աiڀ32:q+UjVsvg}3~HgWgYI| {ilT3 K#99~Ӽ9ب#/GSXܾFsv՜Boho٫Qkjj"fv5z>~0Z@ M' a$ݐ,E6:E.ϏІW$='5IAbx,}=}ZKs^myo,|ؙ¹) ]@`|Ãj0/UZ)d ={ZpI9ID>^MKQ Y!EDR[sZ矷Y_ڻm}սhy_9tHTߞ~һ<1DD}^Ѻ*ejQF[:(;[n> =uzXЅt m}8{PRRЍ d2;"Ny,XpDL2gށFz0Fhs@3X){ vbXꄦk~_O7 DE1NJ;n]h*o=$?] 
SJ+`˩6BҘAn K1Xc˝F%m: {^F(`MtW` ht#\}n7ܪVę %FMyp۷i}>nefWϛ؇*"JZm6{=V%43lJGd Zz84N{"s9$Yr"[\40 %^: q&=HUlTBvEd:0lVc|$ix@OYxȝzZŢxfA :0-Z孺 IWRtFU2Lt}[:J'#6@[KCBxtWי(:є;ʼH|Ѭq~K$hACͯń@o EUQSڀlG(_;a{IxYGZrµt&YkPHvF >;ܕnԾ5xmw-Ta5kEhWܑ}FE"H~# +YZ k>4~xY:[Ƌ^ɛp+Y c{8pkp2CV(fWIjs.Q{^ O3p\qCѲ"=Z Xf66z;3 ieLȩۚu7$*;+;^@iXA/sWM .e(G$n׫e|:*$vYަ0#z`)1eOI?#8ǕeY~1 ^ \[kBcW`W$ 7ca3}'T]{.sElE&n&Ƙjb'Cޢ&m͉H j Z-엔JnIۢ1W^/" aaYfa-{zhUk m?LH!Fi>v'-V}cuv[&>įv(iϕk/b%kwwZ(Ŵ,V>kII#8ރjg⾕U˺RA%2ܞZk7\d5|9xU/Ϋ]FD}PtL;\gT6,N `ϘV]zY)nr(?mܿ=[ 6ev1ϖݍ!L 5u&a6$bGԏ)`b1s '\F\] e !] o|JmO:bי )zUH;ňeCX*HjcY:8jή@s1Wxc$dNEc{NàrflO,1ƅJ*(x|x;:ʏnY[s{2`2޹JAR*?5EtGM8"C0q \-N19@'&Q4g$eOOt3-%.8V>Y+ѺQmlT)R2;M C03}eKz4aԩs^1_vDJ5NKeMjH 4ũ+"O=YT E%1͘=ѫDO¨' 5g4͍1ETC˩QX*F }2n7Bh̦)aeIS c^c0%аfȿ7T+xnQew ̉+4xΖƎ{їZ.IZl mg|ZƲY]G֋K)v`P,שT6iHJ:q^FdRa.)t-MVV1hYD1$2Z9t_1>QnphG۟ c@t5sBbv}{j\b2V pgCh߁AƬ[*aR>зuvcK/SF EN'MW@Vg0%Erug;]shʈG|9m࣮NishƷɧFX]"`g=5x)@,&1֯VG0 W|rӼr`%5/ERܩcB.n;4WzH "@M"r`CI!N:ZN c]$qQ8Enlj(> `q$yz\wrl[MِP ;&˾].i?iN n ҙvCO;1r/-u&KIYJ8% F]4 Qo, .,IoFH=+AȆC!(@c$IxŊc$[Ĵ TḘ{,j Ō b6gڇrsL wd>I}źtA :hb;U"ٴWR[MDdh\K'=EpEST i( LĵOx$GEKELq^qX4}9ˎ.Dx5<>iwBi3?rO{'*3+m @vEm WM wlY "% ڤC}d1nfY&c!3?%EºHCWڠCCjjߛa|֫7iNX:,UJP0XQJyңz L 7NMjϞF*-u#pD`!FãP)ĭ[Z;9QR"0#.:X5fїy9>$m7>et:)Gy}s#ÍkK؊5z׾YLmOXc~=.JK/Dv>` O9љ15u%GKWBJXI&cSѪ+&"%*#Z1MF`1 4͟9&pDX#;A*wMbbP9Yϲ(Ne{C_F-] ]SD9oMl9%g{H9lC';wiHdX;0-bM@Z=6*\o#H[5AVK|˸DL.>%:iҬK{$"5 rvCE(rbFIU\F믪 C~(ғd)MM}qnVYtб-Cc^{C( C쁜{Q2ͬՕ4T(~2\s, vC[^O%PR"L4zS-R\\I]ɘu>!+.Cgߴo= `Tı})٩s^47œN0lB8H!R f5'<̃Ndo G 8kD$CJqcR~:r3hg3&cr+kV5֚IG81c:{YƦ={ϸM!␒a!}=؊kAϻ9:`+ȹG3xjm;sM;M;='ޓޗ󍘥M0H9 $Dvh]H2Jo3'_K^ ezӵێiC㘂%8y5:F8]!&F? 
Xamgqxih}$& DkBF=BNZD[NMʹe `k 삮V sJ+fخ5"v$nC2&*Mz'iIҥ>ؾM9>%odjOHa"ZB1gC,\2d"7tnbVשIo){\Lcvѐp>l y'<8o(v7M>A=q>h؂z2sruk(jsU=aa (QuEBܯz~$ =-ΥJ&%s{`w 67Yz ^KAkATW걉`Kڋx;/ǨX[&ov'rFkДٚ([Y[K#AlfvU :m:LMErj~14a-p2(cz~YLeе1U tW)PK+)xHNE -+,ʳYeJ!F`Ph26epġSl9G|qhX6NSh1Coj[m3y&]{S˾f7%y\6Pgƒj-&`JLbZ`{ѹUtJe2g|>#f''a"ӑcm :uJ]Ǹ.C0,ћG-Qu:M&4sy6zOK|=ྉHHr;bd`]= }R [` )JHՍ6o>`ԓKMH׀8epq{|u8ЦV}?AxFB4<)rg'GvOyF>S[acrc{[k"I?A܋𸌙~ӆSc3R8TOJcHfCgk 0sm_/S֐.sT )]7 Gq ,k)51Xʄxsig;٥~7ln8V M< a=*ݫ+eGilg棼s-O| V0!@O49a&?c.i0F,hngBf %qۙ)y롥!ks:IK?Qg% {Fꡒ 4JLf7(/A43!yE'gO6f3ƫUW^"׌}VyQ>U2ҭex3r'ae&5<ďLh5,?a ?y0[' qu^ UViR ҂sC7klΛ? 22ٰ3&ߝs&~70!^ |H^$/B'hbƇ!)V0Sf@Ac`f`^?3ZYI?kVJlHP7%Dwm#KgCJY|r d 1ØխٖJZ}9ft^MrƑVn"Ggj4|J=Lrg}Vg_0(do%+dž3tLI?Z#L_^訇|s0S M 韉ON}M@(K:? PNq~隱]4a gaq` +y=[%<9zDt֯x$mIW%qÄ$XHTȽ@G&ӝE,(7bT]iۤm1JQ[LGʗ@7=<眛՚jHt?xbm5endstream endobj 266 0 obj << /Filter /FlateDecode /Length 6308 >> stream x][q~g_rN<ɱHDY ,?rC.Ey%3A{Z}PZ0ٞ嫯]_1/g^,^9/+x{d| Jc!,)i'ٟb~X7g+E6Y:.p៎rߍtЕθ0̩UgĦi-2s[iQoZ6 ǭ!_"C=š¯ԓN%Wɴ>wτѹqaZ,)V$ҷ%:̘ 衾e'oC/EA20Y:@>; S9 [j 60*Nz]|:5W+;vTVoOU^iJ47Ũ5U6X vB.6I:C7u6۝>'[G{/ +M--Dt7w Dȸ2mkUkԗq徭|ܨYة{|-:W {y0u+! 
.cRIXuL ]o W}{3kiWzR(/ 3Ր VG rXi[]ȧOo:9Ҕl 5RXoc eJy݋\Rv_?,М*B6"Z89%m&?:G1Iڰo9AeE$_e3ÏkX:SI^ΝY$t{BŴfV]WoVP%_jOf(i'k(%E a"X,/bnQ/ahMq">@ Lz bel7_6ĀQp"^M>I6{R}l_ڔRz{O}Ѡ>%sM AO2%JN̨<Aށ|9[H44W`;!8fimz@Ȃ@4pm5#N -Gpi]<}эj8i1 S?ߓ/tX+" '֡ؖhJ~It6@HcբAVӺq .0] ﴐ`[a- Gm~ZP${,ŽWC Rj16&66%; xD++8ӠʀO1p`yuA{rZ/őIM I8"|rGQӕ@z~ (N)u>}y:2[GaLݮX_Ҭ&*%#> K_)d02P=w@Q2hpþ!h8F[џdi3piHT0aa]'(wouu88_b4O`:㸽u6g%O c5+1i@TRuN;iOd/qW8K|*U{A#Դ%:1uł~j ]qO^.m킷VCfj,d,-XӡTW׭DIp QpxLb!dY"؝a6<ȷq\0XA<) Վ!2?:D mm"dA?廯t_dl=z#Yy_a1@0s{ưa-mcC[ CC"a 8j=lMidgSy׆xoYbϰctc@ A: ֑NqPZMp(5BVȃf,HcmC@l'M淰䑚Ha h@47 ; Dl*JEhaS8|u(_`5gV~;urηD@“H|vJ8ÝӫHO QdZ%M~5# -oQ8Qhh]Ȃ)-;E^C#8ꊸ.~{bzKeWr=gp%{V\NQNR=,pM"@S1r?*,$#_aPg?,%?0l& [ įpNtة&-ydk9:qn%5 pC*MO5g"Q!#,OeSĐ㜳%p_}U"ޡ2m<9| @{ELB/"i^2$_ ,49ȆLD٦sxu\Fmf،C1ixWצ# jꂩJ_ Iz6za.;7fIь >.915,(p͜-UBuB0>hG`W b'f3kU%'C?j?ǙN5QkVPn ~FFLA;du#CHI@#C`-KS^v1L8JH65"o3OA򰘤^|mek=J#븬ĥFp^Igp??_~3-KY,Xw p:kGMu>mZOag40xç{ óǢ 5UXq-V硢;YBi6݊8bbR?4b1: bCDeW .S¬Tק>U COX8@W Ɛc$v0ek۲b5MfrljPZg!o &*6s|5G|b|L$[;a;D95Zajh.ySdK!muI4E!j-H2rS)\n8)/l^+;[R`@Z'eôϔ 5Xթ&pBVzӥMiآ!,/APw rP).gXi L5ߖ()U43PB?EB cg8F8>WqH~Ylrڢ(R1);B;u9#pp&*FO2zhQ49LbV{H O>mHl4шFJgo ) 9|uZms+f7 brM|Rh0N _RBտ|Vh”^`O`7zIEReQ*T#PeO9R}-(0z$;z߇ |4NS]¡Vme`)Sߎ}IjOC$:ENh'߱U {;қÚrt0;iZ8&rPC֬IKSԄy)WP]{,%~))Ų]HPqb3tbQR.II΋ Lp*4?jrjR*ŤߊKzhf_0t@ ^ )/(PB`$W.n+{ cvo;IIٵYdj.&D:cgvcQ9CYm7r`@n{kwzU=/Y{44Gͥ 8iz"7+' yxfS76/U, Llz Đ!V%hW:V@3 \ˑ=*g@aLOj`G6M(D/9)5>ݹH #[? :`힮'/>>v.Xq]_rߝk#xTwH403+3%UJcvy1k(P, o FPe^t)E#J`v~*+k{'OėnGQ%Į?UI 3?[3P]RXB;;]9,1'[W6\WxdZ[4|kzJ3]FOxPrq"jRU6N0jnc<^ta[;p GUt8z&Ѽ?9Ͱl.Eձ\f^3 F4Jq W8 yJHjkSѨڱ2O㸍ػrY33b grn !eSȯY9*ɵ.P3EMBlZrׇF%}ƎQY.c$b22Y䄘| 04Gcݑh)HmYXkZۄuHJ)Д!ccev>OkK4^L<AS@>%mR=X04)X,9b+Ұm&*%\RGvhYX+?a?JOo E*:=,4 o!T1QZLvY_H'm}X} FY\8Ec3MYpͤݵU@G!MCʃ9^ y,!K]Ƶ ^~ԅL]QT԰eݖfo?fIX%"עp{T *B8e{(Xߗ @5i .JR>0)R(LK1 w䲫ct64l:"7+*Bb k^Xg1;3w"$=r#OS\VrnɞBf@w³R4:9%5VᵴӴIۣ m\c3ގ2,"g%]j9 D `/VZۧ:P:<+)|8K6ؕ=5d {@(4ti|e*nsN NivJ0[] ˦#Ui c?ehyҁL% I,N(ߜ}Y:%R7 _t|yܸ 9vnRq\g5 7G~PNWouzSxHF]*AB.ƴ¥Ø( Nc. 
;>&O=\h*0IsMzIxx2 %0T C$@Jn:֟;hzu)j5s%#b9sܩTR| )jr 0z֘;{0`m^C.Es0-uun욖EX6){jQbơ)&Xإ7qy P4JlHz"* 0۫v6^Sߧ'trTx{LrH 5Q{&DCKѬ Ꝇ$w Mq5XVx0=qJY%06ᠪy\.Z%SEmlWގ/'/ёGںqcΥ S-fu)IKZtq+P+7V)9FC~D=Jr6Q2lQ(›LjʫvF.%|SE{}ϐ;:xKcC6>ykE7䅑 %E> /W [ 1 3 1 ] /Info 3 0 R /Root 2 0 R /Size 268 /ID [<6dbf0627e10a8ac594c153faddaa91f4>] >> stream xӻQs6nݗB)! BQiWPh"JFC!QHM_&9E(R}  iiunGkwQڹnm<ꢰŏK5} of*0] efXf6%` k0b.M-sֆ#0q'X30+P3f'Y1ڗj)v endstream endobj startxref 207662 %%EOF jsonlite/inst/doc/json-paging.Rmd0000644000176200001440000002225512573053677016530 0ustar liggesusers--- title: "Combining pages of JSON data with jsonlite" date: "2015-09-06" output: html_document vignette: > %\VignetteIndexEntry{Combining pages of JSON data with jsonlite} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- The [jsonlite](https://cran.r-project.org/package=jsonlite) package is a `JSON` parser/generator for R which is optimized for pipelines and web APIs. It is used by the OpenCPU system and many other packages to get data in and out of R using the `JSON` format. ## A bidirectional mapping One of the main strengths of `jsonlite` is that it implements a bidirectional [mapping](http://arxiv.org/abs/1403.2805) between JSON and data frames. Thereby it can convert nested collections of JSON records, as they often appear on the web, immediately into the appropriate R structure. For example to grab some data from ProPublica we can simply use: ```r library(jsonlite) mydata <- fromJSON("https://projects.propublica.org/forensics/geos.json", flatten = TRUE) View(mydata) ``` The `mydata` object is a data frame which can be used directly for modeling or visualization, without the need for any further complicated data manipulation. ## Paging with jsonlite A question that comes up frequently is how to combine pages of data. Most web APIs limit the amount of data that can be retrieved per request. 
If the client needs more data than what can fits in a single request, it needs to break down the data into multiple requests that each retrieve a fragment (page) of data, not unlike pages in a book. In practice this is often implemented using a `page` parameter in the API. Below an example from the [ProPublica Nonprofit Explorer API](http://projects.propublica.org/nonprofits/api) where we retrieve the first 3 pages of tax-exempt organizations in the USA, ordered by revenue: ```r baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" mydata0 <- fromJSON(paste0(baseurl, "&page=0"), flatten = TRUE) mydata1 <- fromJSON(paste0(baseurl, "&page=1"), flatten = TRUE) mydata2 <- fromJSON(paste0(baseurl, "&page=2"), flatten = TRUE) #The actual data is in the filings element mydata0$filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")] ``` ``` organization.sub_name organization.city 1 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 2 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 3 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 4 DAVIDSON COUNTY COMMUNITY COLLEGE FOUNDATION INC LEXINGTON 5 KAISER FOUNDATION HOSPITALS OAKLAND 6 KAISER FOUNDATION HOSPITALS OAKLAND 7 KAISER FOUNDATION HOSPITALS OAKLAND 8 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 9 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 10 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN totrevenue 1 42346486950 2 40148558254 3 37786011714 4 30821445312 5 20013171194 6 18543043972 7 17980030355 8 10619215354 9 10452560305 10 9636630380 ``` To analyze or visualize these data, we need to combine the pages into a single dataset. We can do this with the `rbind.pages` function. 
Note that in this example, the actual data is contained by the `filings` field: ```r #Rows per data frame nrow(mydata0$filings) ``` ``` [1] 25 ``` ```r #Combine data frames filings <- rbind.pages( list(mydata0$filings, mydata1$filings, mydata2$filings) ) #Total number of rows nrow(filings) ``` ``` [1] 75 ``` ## Automatically combining many pages We can write a simple loop that automatically downloads and combines many pages. For example to retrieve the first 20 pages with non-profits from the example above: ```r #store all pages in a list first baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" pages <- list() for(i in 0:20){ mydata <- fromJSON(paste0(baseurl, "&page=", i)) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } #combine all into one filings <- rbind.pages(pages) #check output nrow(filings) ``` ``` [1] 525 ``` ```r colnames(filings) ``` ``` [1] "tax_prd" "tax_prd_yr" [3] "formtype" "pdf_url" [5] "updated" "totrevenue" [7] "totfuncexpns" "totassetsend" [9] "totliabend" "pct_compnsatncurrofcr" [11] "tax_pd" "subseccd" [13] "unrelbusinccd" "initiationfees" [15] "grsrcptspublicuse" "grsincmembers" [17] "grsincother" "totcntrbgfts" [19] "totprgmrevnue" "invstmntinc" [21] "txexmptbndsproceeds" "royaltsinc" [23] "grsrntsreal" "grsrntsprsnl" [25] "rntlexpnsreal" "rntlexpnsprsnl" [27] "rntlincreal" "rntlincprsnl" [29] "netrntlinc" "grsalesecur" [31] "grsalesothr" "cstbasisecur" [33] "cstbasisothr" "gnlsecur" [35] "gnlsothr" "netgnls" [37] "grsincfndrsng" "lessdirfndrsng" [39] "netincfndrsng" "grsincgaming" [41] "lessdirgaming" "netincgaming" [43] "grsalesinvent" "lesscstofgoods" [45] "netincsales" "miscrevtot11e" [47] "compnsatncurrofcr" "othrsalwages" [49] "payrolltx" "profndraising" [51] "txexmptbndsend" "secrdmrtgsend" [53] "unsecurednotesend" "retainedearnend" [55] "totnetassetend" "nonpfrea" [57] "gftgrntsrcvd170" "txrevnuelevied170" [59] "srvcsval170" "grsinc170" [61] "grsrcptsrelated170" 
"totgftgrntrcvd509" [63] "grsrcptsadmissn509" "txrevnuelevied509" [65] "srvcsval509" "subtotsuppinc509" [67] "totsupp509" "ein" [69] "organization" "eostatus" [71] "tax_yr" "operatingcd" [73] "assetcdgen" "transinccd" [75] "subcd" "grscontrgifts" [77] "intrstrvnue" "dividndsamt" [79] "totexcapgn" "totexcapls" [81] "grsprofitbus" "otherincamt" [83] "compofficers" "contrpdpbks" [85] "totrcptperbks" "totexpnspbks" [87] "excessrcpts" "totexpnsexempt" [89] "netinvstinc" "totaxpyr" [91] "adjnetinc" "invstgovtoblig" [93] "invstcorpstk" "invstcorpbnd" [95] "totinvstsec" "fairmrktvalamt" [97] "undistribincyr" "cmpmininvstret" [99] "sec4940notxcd" "sec4940redtxcd" [101] "infleg" "contractncd" [103] "claimstatcd" "propexchcd" [105] "brwlndmnycd" "furngoodscd" [107] "paidcmpncd" "trnsothasstscd" [109] "agremkpaycd" "undistrinccd" [111] "dirindirintcd" "invstjexmptcd" [113] "propgndacd" "excesshldcd" [115] "grntindivcd" "nchrtygrntcd" [117] "nreligiouscd" "grsrents" [119] "costsold" "totrcptnetinc" [121] "trcptadjnetinc" "topradmnexpnsa" [123] "topradmnexpnsb" "topradmnexpnsd" [125] "totexpnsnetinc" "totexpnsadjnet" [127] "othrcashamt" "mrtgloans" [129] "othrinvstend" "fairmrktvaleoy" [131] "mrtgnotespay" "tfundnworth" [133] "invstexcisetx" "sect511tx" [135] "subtitleatx" "esttaxcr" [137] "txwithldsrc" "txpaidf2758" [139] "erronbkupwthld" "estpnlty" [141] "balduopt" "crelamt" [143] "tfairmrktunuse" "distribamt" [145] "adjnetinccola" "adjnetinccolb" [147] "adjnetinccolc" "adjnetinccold" [149] "adjnetinctot" "qlfydistriba" [151] "qlfydistribb" "qlfydistribc" [153] "qlfydistribd" "qlfydistribtot" [155] "valassetscola" "valassetscolb" [157] "valassetscolc" "valassetscold" [159] "valassetstot" "qlfyasseta" [161] "qlfyassetb" "qlfyassetc" [163] "qlfyassetd" "qlfyassettot" [165] "endwmntscola" "endwmntscolb" [167] "endwmntscolc" "endwmntscold" [169] "endwmntstot" "totsuprtcola" [171] "totsuprtcolb" "totsuprtcolc" [173] "totsuprtcold" "totsuprttot" [175] "pubsuprtcola" "pubsuprtcolb" 
[177] "pubsuprtcolc" "pubsuprtcold" [179] "pubsuprttot" "grsinvstinca" [181] "grsinvstincb" "grsinvstincc" [183] "grsinvstincd" "grsinvstinctot" ``` From here, we can go straight to analyzing the filings data without any further tedious data manipulation. jsonlite/tests/0000755000176200001440000000000012540777273013261 5ustar liggesusersjsonlite/tests/run-all.R0000644000176200001440000000035212540777273014756 0ustar liggesusers#This file runs all unit tests on every R CMD check. #Comment this out to disable. library(testthat) #filter is to disable tests that rely on external servers test_package("jsonlite", filter="toJSON|fromJSON|libjson|serializeJSON") jsonlite/src/0000755000176200001440000000000012626133701012671 5ustar liggesusersjsonlite/src/escape_chars.c0000644000176200001440000000446012626133701015461 0ustar liggesusers#include #include #include /* Fast escaping of character vectors (Winston Chang) https://gist.github.com/wch/e3ec5b20eb712f1b22b2 http://stackoverflow.com/questions/25609174/fast-escaping-deparsing-of-character-vectors-in-r/25613834#25613834 */ SEXP C_escape_chars_one(SEXP x) { if (TYPEOF(x) != CHARSXP) error("x must be a CHARSXP"); const char* old = CHAR(x); char* old_p = (char*)old; // Count up the number of matches int matches = 0; char oldc; do { oldc = *old_p; switch(oldc) { case '\\': case '"': case '\n': case '\r': case '\t': case '\b': case '\f': matches++; } old_p++; } while(oldc != '\0'); // Copy old string to new string, replacing where necessary. old_p = (char*)old; // Allocate string memory; add 2 for start and end quotes. 
char* newstr = (char*)malloc(strlen(old) + matches + 3); char* new_p = newstr; *new_p = '"'; new_p++; do { oldc = *old_p; switch(oldc) { case '\\': *new_p = '\\'; new_p++; *new_p = '\\'; break; case '"': *new_p = '\\'; new_p++; *new_p = '"'; break; case '\n': *new_p = '\\'; new_p++; *new_p = 'n'; break; case '\r': *new_p = '\\'; new_p++; *new_p = 'r'; break; case '\t': *new_p = '\\'; new_p++; *new_p = 't'; break; case '\b': *new_p = '\\'; new_p++; *new_p = 'b'; break; case '\f': *new_p = '\\'; new_p++; *new_p = 'f'; break; case '\0': // End with a quote char *new_p = '"'; new_p++; *new_p = '\0'; break; default: *new_p = oldc; } old_p++; new_p++; } while(oldc != '\0'); SEXP val = mkCharCE(newstr, getCharCE(x)); free(newstr); return val; } SEXP C_escape_chars(SEXP x) { if (!isString(x)) error("x must be a character vector."); if (x == R_NilValue || length(x) == 0) return x; int len = length(x); SEXP out = PROTECT(allocVector(STRSXP, len)); for (int i=0; i> 2 ]; out[1] = cb64[ ((in[0] & 0x03) << 4) | ((in[1] & 0xf0) >> 4) ]; out[2] = (unsigned char) (len > 1 ? cb64[ ((in[1] & 0x0f) << 2) | ((in[2] & 0xc0) >> 6) ] : '='); out[3] = (unsigned char) (len > 2 ? cb64[ in[2] & 0x3f ] : '='); } /* ** encode ** ** base64 encode a stream adding padding and line breaks as per spec. 
*/ void encode( FILE *infile, FILE *outfile, int linesize ) { unsigned char in[3], out[4]; int i, len, blocksout = 0; while( !feof( infile ) ) { len = 0; for( i = 0; i < 3; i++ ) { in[i] = (unsigned char) getc( infile ); if( !feof( infile ) ) { len++; } else { in[i] = 0; } } if( len ) { encodeblock( in, out, len ); for( i = 0; i < 4; i++ ) { putc( out[i], outfile ); } blocksout++; } if( blocksout >= (linesize/4) || feof( infile ) ) { if( blocksout ) { fprintf( outfile, "\r\n" ); } blocksout = 0; } } } /* ** decodeblock ** ** decode 4 '6-bit' characters into 3 8-bit binary bytes */ void decodeblock( unsigned char in[4], unsigned char out[3] ) { out[ 0 ] = (unsigned char ) (in[0] << 2 | in[1] >> 4); out[ 1 ] = (unsigned char ) (in[1] << 4 | in[2] >> 2); out[ 2 ] = (unsigned char ) (((in[2] << 6) & 0xc0) | in[3]); } /* ** decode ** ** decode a base64 encoded stream discarding padding, line breaks and noise */ void decode( FILE *infile, FILE *outfile ) { unsigned char in[4], out[3], v; int i, len; while( !feof( infile ) ) { for( len = 0, i = 0; i < 4 && !feof( infile ); i++ ) { v = 0; while( !feof( infile ) && v == 0 ) { v = (unsigned char) getc( infile ); v = (unsigned char) ((v < 43 || v > 122) ? 0 : cd64[ v - 43 ]); if( v ) { v = (unsigned char) ((v == '$') ? 0 : v - 61); } } if( !feof( infile ) ) { len++; if( v ) { in[ i ] = (unsigned char) (v - 1); } } else { in[i] = 0; } } if( len ) { decodeblock( in, out ); for( i = 0; i < len - 1; i++ ) { putc( out[i], outfile ); } } } } /* ** b64_message ** ** Gather text messages in one place. ** */ char *b64_message( int errcode ) { char *msgs[ B64_MAX_MESSAGES ] = { "b64:000:Invalid Message Code.", "b64:001:Syntax Error -- check help for usage.", "b64:002:File Error Opening/Creating Files.", "b64:003:File I/O Error -- Note: output file not removed.", "b64:004:Error on output file close.", "b64:004:linesize set to minimum." 
}; char *msg = msgs[ 0 ]; if( errcode > 0 && errcode < B64_MAX_MESSAGES ) { msg = msgs[ errcode ]; } return( msg ); } /* ** b64 ** ** 'engine' that opens streams and calls encode/decode */ int b64( int opt, char *infilename, char *outfilename, int linesize ) { FILE *infile; int retcode = B64_FILE_ERROR; if( !infilename ) { infile = stdin; } else { infile = fopen( infilename, "rb" ); } if( !infile ) { perror( infilename ); } else { FILE *outfile; // if( !outfilename ) { // outfile = stdout; // } // else { outfile = fopen( outfilename, "wb" ); // } if( !outfile ) { perror( outfilename ); } else { if( opt == 'e' ) { encode( infile, outfile, linesize ); } else { decode( infile, outfile ); } if (ferror( infile ) || ferror( outfile )) { retcode = B64_FILE_IO_ERROR; } else { retcode = 0; } // if( outfile != stdout ) { if( fclose( outfile ) != 0 ) { perror( b64_message( B64_ERROR_OUT_CLOSE ) ); retcode = B64_FILE_IO_ERROR; } // } } if( infile != stdin ) { fclose( infile ); } } return( retcode ); } /* R functions */ SEXP base64_encode_(SEXP input, SEXP output, SEXP line_size){ int res = b64( 'e', (char*)CHAR(STRING_ELT(input,0)), (char*)CHAR(STRING_ELT(output,0)), INTEGER(line_size)[0] ) ; if( res ){ error( "%s\n", b64_message( res ) ) ; } return R_NilValue ; } SEXP base64_decode_(SEXP input, SEXP output){ int res = b64( 'd', (char*)CHAR(STRING_ELT(input,0)), (char*)CHAR(STRING_ELT(output,0)), 0 ) ; if( res ){ error( "%s\n", b64_message( res ) ) ; } return R_NilValue ; } jsonlite/src/collapse_pretty.c0000644000176200001440000001027112626133701016247 0ustar liggesusers#include #include #include /* a function to insert n spaces */ void append_whitespace(char** cur, size_t n){ memset(*cur, ' ', n); *cur += n; } /* add and increment */ void append_text(char **cur, const char* val, int n){ if(n < 0) n = strlen(val); memcpy(*cur, val, n); *cur += n; } /* collapse a json object with n spaces */ SEXP C_collapse_object_pretty(SEXP x, SEXP y, SEXP indent) { if (!isString(x) || 
!isString(y)) error("x and y must character vectors."); int ni = asInteger(indent); if(ni == NA_INTEGER) error("indent must not be NA"); int len = length(x); if (len != length(y)) error("x and y must have same length."); //calculate required space size_t nchar_total = 0; for (int i=0; i #include #include #include SEXP R_num_to_char(SEXP x, SEXP digits, SEXP na_as_string, SEXP use_signif) { int len = length(x); int na_string = asLogical(na_as_string); int signif = asLogical(use_signif); char buf[32]; SEXP out = PROTECT(allocVector(STRSXP, len)); if(isInteger(x)){ for (int i=0; i -1 && precision < 10 && fabs(val) < 2147483647 && fabs(val) > 1e-5) { //preferred method: fast with fixed decimal digits //does not support large numbers or scientific notation modp_dtoa2(val, buf, precision); SET_STRING_ELT(out, i, mkChar(buf)); //Rprintf("Using modp_dtoa2\n"); } else { //fall back on sprintf (includes scientific notation) //limit total precision to 15 significant digits to avoid noise //funky formula is mostly to convert decimal digits into significant digits snprintf(buf, 32, "%.*g", (int) ceil(fmin(15, fmax(1, log10(val)) + precision)), val); SET_STRING_ELT(out, i, mkChar(buf)); //Rprintf("Using sprintf with precision %d digits\n",(int) ceil(fmin(15, fmax(1, log10(val)) + precision))); } } } else { error("num_to_char called with invalid object type."); } UNPROTECT(1); return out; } jsonlite/src/base64.h0000644000176200001440000000143612626133701014132 0ustar liggesusers#ifndef BASE64__BASE64_H #define BASE64__BASE64_H #include #include #include #include /* ** returnable errors ** ** Error codes returned to the operating system. ** */ #define B64_SYNTAX_ERROR 1 #define B64_FILE_ERROR 2 #define B64_FILE_IO_ERROR 3 #define B64_ERROR_OUT_CLOSE 4 #define B64_LINE_SIZE_TO_MIN 5 #define B64_DEF_LINE_SIZE 72 #define B64_MIN_LINE_SIZE 4 #define THIS_OPT(ac, av) (ac > 1 ? av[1][0] == '-' ? 
av[1][1] : 0 : 0) #define B64_MAX_MESSAGES 6 SEXP base64_encode_(SEXP input, SEXP output, SEXP line_size) ; SEXP base64_decode_(SEXP input, SEXP output) ; #endif jsonlite/src/integer64_to_na.c0000644000176200001440000000164012626133701016025 0ustar liggesusers#include #include #define NA_INTEGER64 LLONG_MIN SEXP R_integer64_to_char(SEXP x, SEXP na_as_string){ int len = length(x); int na_string = asLogical(na_as_string); long long * xint = (long long *) REAL(x); char buf[32]; SEXP out = PROTECT(allocVector(STRSXP, len)); for (int i = 0; i < len; i++) { if(xint[i] == NA_INTEGER64){ if(na_string == NA_LOGICAL){ SET_STRING_ELT(out, i, NA_STRING); } else if(na_string){ SET_STRING_ELT(out, i, mkChar("\"NA\"")); } else { SET_STRING_ELT(out, i, mkChar("null")); } } else { #ifdef _WIN32 snprintf(buf, 32, "%I64d", xint[i]); #else //snprintf(buf, 32, "%lld", xint[i]); //modp is faster (but does not work on windows) modp_litoa10(xint[i], buf); #endif SET_STRING_ELT(out, i, mkChar(buf)); } } UNPROTECT(1); return out; } jsonlite/src/null_to_na.c0000644000176200001440000000364112626133701015173 0ustar liggesusers#include #include #include #include /* This function takes a list and replaces all NULL values by NA. In addition, it will parse strings "NA" "NaN" "Inf" and "-Inf", unless there is at least one non-na string element in the list. In that case converting to real values has no point because unlist() will coerse them back into a string anyway. */ SEXP C_null_to_na(SEXP x) { int len = length(x); if(len == 0) return x; //null always turns into NA bool looks_like_character_vector = false; for (int i=0; i * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. 
* * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include "yajl_encode.h" #include #include #include #include static void CharToHex(unsigned char c, char * hexBuf) { const char * hexchar = "0123456789ABCDEF"; hexBuf[0] = hexchar[c >> 4]; hexBuf[1] = hexchar[c & 0x0F]; } void yajl_string_encode(const yajl_print_t print, void * ctx, const unsigned char * str, size_t len, int escape_solidus) { size_t beg = 0; size_t end = 0; char hexBuf[7]; hexBuf[0] = '\\'; hexBuf[1] = 'u'; hexBuf[2] = '0'; hexBuf[3] = '0'; hexBuf[6] = 0; while (end < len) { const char * escaped = NULL; switch (str[end]) { case '\r': escaped = "\\r"; break; case '\n': escaped = "\\n"; break; case '\\': escaped = "\\\\"; break; /* it is not required to escape a solidus in JSON: * read sec. 
2.5: http://www.ietf.org/rfc/rfc4627.txt * specifically, this production from the grammar: * unescaped = %x20-21 / %x23-5B / %x5D-10FFFF */ case '/': if (escape_solidus) escaped = "\\/"; break; case '"': escaped = "\\\""; break; case '\f': escaped = "\\f"; break; case '\b': escaped = "\\b"; break; case '\t': escaped = "\\t"; break; default: if ((unsigned char) str[end] < 32) { CharToHex(str[end], hexBuf + 4); escaped = hexBuf; } break; } if (escaped != NULL) { print(ctx, (const char *) (str + beg), end - beg); print(ctx, escaped, (unsigned int)strlen(escaped)); beg = ++end; } else { ++end; } } print(ctx, (const char *) (str + beg), end - beg); } static void hexToDigit(unsigned int * val, const unsigned char * hex) { unsigned int i; for (i=0;i<4;i++) { unsigned char c = hex[i]; if (c >= 'A') c = (c & ~0x20) - 7; c -= '0'; assert(!(c & 0xF0)); *val = (*val << 4) | c; } } static void Utf32toUtf8(unsigned int codepoint, char * utf8Buf) { if (codepoint < 0x80) { utf8Buf[0] = (char) codepoint; utf8Buf[1] = 0; } else if (codepoint < 0x0800) { utf8Buf[0] = (char) ((codepoint >> 6) | 0xC0); utf8Buf[1] = (char) ((codepoint & 0x3F) | 0x80); utf8Buf[2] = 0; } else if (codepoint < 0x10000) { utf8Buf[0] = (char) ((codepoint >> 12) | 0xE0); utf8Buf[1] = (char) (((codepoint >> 6) & 0x3F) | 0x80); utf8Buf[2] = (char) ((codepoint & 0x3F) | 0x80); utf8Buf[3] = 0; } else if (codepoint < 0x200000) { utf8Buf[0] =(char)((codepoint >> 18) | 0xF0); utf8Buf[1] =(char)(((codepoint >> 12) & 0x3F) | 0x80); utf8Buf[2] =(char)(((codepoint >> 6) & 0x3F) | 0x80); utf8Buf[3] =(char)((codepoint & 0x3F) | 0x80); utf8Buf[4] = 0; } else { utf8Buf[0] = '?'; utf8Buf[1] = 0; } } void yajl_string_decode(yajl_buf buf, const unsigned char * str, size_t len) { size_t beg = 0; size_t end = 0; while (end < len) { if (str[end] == '\\') { char utf8Buf[5]; const char * unescaped = "?"; yajl_buf_append(buf, str + beg, end - beg); switch (str[++end]) { case 'r': unescaped = "\r"; break; case 'n': unescaped = "\n"; 
break; case '\\': unescaped = "\\"; break; case '/': unescaped = "/"; break; case '"': unescaped = "\""; break; case 'f': unescaped = "\f"; break; case 'b': unescaped = "\b"; break; case 't': unescaped = "\t"; break; case 'u': { unsigned int codepoint = 0; hexToDigit(&codepoint, str + ++end); end+=3; /* check if this is a surrogate */ if ((codepoint & 0xFC00) == 0xD800) { end++; if (str[end] == '\\' && str[end + 1] == 'u') { unsigned int surrogate = 0; hexToDigit(&surrogate, str + end + 2); codepoint = (((codepoint & 0x3F) << 10) | ((((codepoint >> 6) & 0xF) + 1) << 16) | (surrogate & 0x3FF)); end += 5; } else { unescaped = "?"; break; } } Utf32toUtf8(codepoint, utf8Buf); unescaped = utf8Buf; if (codepoint == 0) { yajl_buf_append(buf, unescaped, 1); beg = ++end; continue; } break; } default: assert("this should never happen" == NULL); } yajl_buf_append(buf, unescaped, (unsigned int)strlen(unescaped)); beg = ++end; } else { end++; } } yajl_buf_append(buf, str + beg, end - beg); } #define ADV_PTR s++; if (!(len--)) return 0; int yajl_string_validate_utf8(const unsigned char * s, size_t len) { if (!len) return 1; if (!s) return 0; while (len--) { /* single byte */ if (*s <= 0x7f) { /* noop */ } /* two byte */ else if ((*s >> 5) == 0x6) { ADV_PTR; if (!((*s >> 6) == 0x2)) return 0; } /* three byte */ else if ((*s >> 4) == 0x0e) { ADV_PTR; if (!((*s >> 6) == 0x2)) return 0; ADV_PTR; if (!((*s >> 6) == 0x2)) return 0; } /* four byte */ else if ((*s >> 3) == 0x1e) { ADV_PTR; if (!((*s >> 6) == 0x2)) return 0; ADV_PTR; if (!((*s >> 6) == 0x2)) return 0; ADV_PTR; if (!((*s >> 6) == 0x2)) return 0; } else { return 0; } s++; } return 1; } jsonlite/src/yajl/yajl_gen.c0000644000176200001440000002544312626133701015574 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear 
in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include "api/yajl_gen.h" #include "yajl_buf.h" #include "yajl_encode.h" #include #include #include #include #include typedef enum { yajl_gen_start, yajl_gen_map_start, yajl_gen_map_key, yajl_gen_map_val, yajl_gen_array_start, yajl_gen_in_array, yajl_gen_complete, yajl_gen_error } yajl_gen_state; struct yajl_gen_t { unsigned int flags; unsigned int depth; const char * indentString; yajl_gen_state state[YAJL_MAX_DEPTH]; yajl_print_t print; void * ctx; /* yajl_buf */ /* memory allocation routines */ yajl_alloc_funcs alloc; }; int yajl_gen_config(yajl_gen g, yajl_gen_option opt, ...) 
{ int rv = 1; va_list ap; va_start(ap, opt); switch(opt) { case yajl_gen_beautify: case yajl_gen_validate_utf8: case yajl_gen_escape_solidus: if (va_arg(ap, int)) g->flags |= opt; else g->flags &= ~opt; break; case yajl_gen_indent_string: { const char *indent = va_arg(ap, const char *); g->indentString = indent; for (; *indent; indent++) { if (*indent != '\n' && *indent != '\v' && *indent != '\f' && *indent != '\t' && *indent != '\r' && *indent != ' ') { g->indentString = NULL; rv = 0; } } break; } case yajl_gen_print_callback: yajl_buf_free(g->ctx); g->print = va_arg(ap, const yajl_print_t); g->ctx = va_arg(ap, void *); break; default: rv = 0; } va_end(ap); return rv; } yajl_gen yajl_gen_alloc(const yajl_alloc_funcs * afs) { yajl_gen g = NULL; yajl_alloc_funcs afsBuffer; /* first order of business is to set up memory allocation routines */ if (afs != NULL) { if (afs->malloc == NULL || afs->realloc == NULL || afs->free == NULL) { return NULL; } } else { yajl_set_default_alloc_funcs(&afsBuffer); afs = &afsBuffer; } g = (yajl_gen) YA_MALLOC(afs, sizeof(struct yajl_gen_t)); if (!g) return NULL; memset((void *) g, 0, sizeof(struct yajl_gen_t)); /* copy in pointers to allocation routines */ memcpy((void *) &(g->alloc), (void *) afs, sizeof(yajl_alloc_funcs)); g->print = (yajl_print_t)&yajl_buf_append; g->ctx = yajl_buf_alloc(&(g->alloc)); g->indentString = " "; return g; } void yajl_gen_reset(yajl_gen g, const char * sep) { g->depth = 0; memset((void *) &(g->state), 0, sizeof(g->state)); if (sep != NULL) g->print(g->ctx, sep, strlen(sep)); } void yajl_gen_free(yajl_gen g) { if (g->print == (yajl_print_t)&yajl_buf_append) yajl_buf_free((yajl_buf)g->ctx); YA_FREE(&(g->alloc), g); } #define INSERT_SEP \ if (g->state[g->depth] == yajl_gen_map_key || \ g->state[g->depth] == yajl_gen_in_array) { \ g->print(g->ctx, ",", 1); \ if ((g->flags & yajl_gen_beautify)) g->print(g->ctx, "\n", 1); \ } else if (g->state[g->depth] == yajl_gen_map_val) { \ g->print(g->ctx, ":", 1); \ if 
((g->flags & yajl_gen_beautify)) g->print(g->ctx, " ", 1); \ } #define INSERT_WHITESPACE \ if ((g->flags & yajl_gen_beautify)) { \ if (g->state[g->depth] != yajl_gen_map_val) { \ unsigned int _i; \ for (_i=0;_idepth;_i++) \ g->print(g->ctx, \ g->indentString, \ (unsigned int)strlen(g->indentString)); \ } \ } #define ENSURE_NOT_KEY \ if (g->state[g->depth] == yajl_gen_map_key || \ g->state[g->depth] == yajl_gen_map_start) { \ return yajl_gen_keys_must_be_strings; \ } \ /* check that we're not complete, or in error state. in a valid state * to be generating */ #define ENSURE_VALID_STATE \ if (g->state[g->depth] == yajl_gen_error) { \ return yajl_gen_in_error_state;\ } else if (g->state[g->depth] == yajl_gen_complete) { \ return yajl_gen_generation_complete; \ } #define INCREMENT_DEPTH \ if (++(g->depth) >= YAJL_MAX_DEPTH) return yajl_max_depth_exceeded; #define DECREMENT_DEPTH \ if (--(g->depth) >= YAJL_MAX_DEPTH) return yajl_gen_generation_complete; #define APPENDED_ATOM \ switch (g->state[g->depth]) { \ case yajl_gen_start: \ g->state[g->depth] = yajl_gen_complete; \ break; \ case yajl_gen_map_start: \ case yajl_gen_map_key: \ g->state[g->depth] = yajl_gen_map_val; \ break; \ case yajl_gen_array_start: \ g->state[g->depth] = yajl_gen_in_array; \ break; \ case yajl_gen_map_val: \ g->state[g->depth] = yajl_gen_map_key; \ break; \ default: \ break; \ } \ #define FINAL_NEWLINE \ if ((g->flags & yajl_gen_beautify) && g->state[g->depth] == yajl_gen_complete) \ g->print(g->ctx, "\n", 1); yajl_gen_status yajl_gen_integer(yajl_gen g, long long int number) { char i[32]; ENSURE_VALID_STATE; ENSURE_NOT_KEY; INSERT_SEP; INSERT_WHITESPACE; #ifdef _WIN32 sprintf(i, "%I64d", number); #else sprintf(i, "%lld", number); #endif g->print(g->ctx, i, (unsigned int)strlen(i)); APPENDED_ATOM; FINAL_NEWLINE; return yajl_gen_status_ok; } #if defined(_WIN32) || defined(WIN32) #include #define isnan _isnan #define isinf !_finite #endif yajl_gen_status yajl_gen_double(yajl_gen g, double number) 
{ char i[32]; ENSURE_VALID_STATE; ENSURE_NOT_KEY; if (isnan(number) || isinf(number)) return yajl_gen_invalid_number; INSERT_SEP; INSERT_WHITESPACE; sprintf(i, "%.20g", number); if (strspn(i, "0123456789-") == strlen(i)) { strcat(i, ".0"); } g->print(g->ctx, i, (unsigned int)strlen(i)); APPENDED_ATOM; FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_number(yajl_gen g, const char * s, size_t l) { ENSURE_VALID_STATE; ENSURE_NOT_KEY; INSERT_SEP; INSERT_WHITESPACE; g->print(g->ctx, s, l); APPENDED_ATOM; FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_string(yajl_gen g, const unsigned char * str, size_t len) { // if validation is enabled, check that the string is valid utf8 // XXX: This checking could be done a little faster, in the same pass as // the string encoding if (g->flags & yajl_gen_validate_utf8) { if (!yajl_string_validate_utf8(str, len)) { return yajl_gen_invalid_string; } } ENSURE_VALID_STATE; INSERT_SEP; INSERT_WHITESPACE; g->print(g->ctx, "\"", 1); yajl_string_encode(g->print, g->ctx, str, len, g->flags & yajl_gen_escape_solidus); g->print(g->ctx, "\"", 1); APPENDED_ATOM; FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_null(yajl_gen g) { ENSURE_VALID_STATE; ENSURE_NOT_KEY; INSERT_SEP; INSERT_WHITESPACE; g->print(g->ctx, "null", strlen("null")); APPENDED_ATOM; FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_bool(yajl_gen g, int boolean) { const char * val = boolean ? 
"true" : "false"; ENSURE_VALID_STATE; ENSURE_NOT_KEY; INSERT_SEP; INSERT_WHITESPACE; g->print(g->ctx, val, (unsigned int)strlen(val)); APPENDED_ATOM; FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_map_open(yajl_gen g) { ENSURE_VALID_STATE; ENSURE_NOT_KEY; INSERT_SEP; INSERT_WHITESPACE; INCREMENT_DEPTH; g->state[g->depth] = yajl_gen_map_start; g->print(g->ctx, "{", 1); if ((g->flags & yajl_gen_beautify)) g->print(g->ctx, "\n", 1); FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_map_close(yajl_gen g) { ENSURE_VALID_STATE; DECREMENT_DEPTH; if ((g->flags & yajl_gen_beautify)) g->print(g->ctx, "\n", 1); APPENDED_ATOM; INSERT_WHITESPACE; g->print(g->ctx, "}", 1); FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_array_open(yajl_gen g) { ENSURE_VALID_STATE; ENSURE_NOT_KEY; INSERT_SEP; INSERT_WHITESPACE; INCREMENT_DEPTH; g->state[g->depth] = yajl_gen_array_start; g->print(g->ctx, "[", 1); if ((g->flags & yajl_gen_beautify)) g->print(g->ctx, "\n", 1); FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_array_close(yajl_gen g) { ENSURE_VALID_STATE; DECREMENT_DEPTH; if ((g->flags & yajl_gen_beautify)) g->print(g->ctx, "\n", 1); APPENDED_ATOM; INSERT_WHITESPACE; g->print(g->ctx, "]", 1); FINAL_NEWLINE; return yajl_gen_status_ok; } yajl_gen_status yajl_gen_get_buf(yajl_gen g, const unsigned char ** buf, size_t * len) { if (g->print != (yajl_print_t)&yajl_buf_append) return yajl_gen_no_buf; *buf = yajl_buf_data((yajl_buf)g->ctx); *len = yajl_buf_len((yajl_buf)g->ctx); return yajl_gen_status_ok; } void yajl_gen_clear(yajl_gen g) { if (g->print == (yajl_print_t)&yajl_buf_append) yajl_buf_clear((yajl_buf)g->ctx); } jsonlite/src/yajl/yajl_tree.c0000644000176200001440000003530512626133701015760 0ustar liggesusers/* * Copyright (c) 2010-2011 Florian Forster * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * 
copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include #include #include #include #include #include "api/yajl_tree.h" #include "api/yajl_parse.h" #include "yajl_parser.h" /* #if defined(_WIN32) || defined(WIN32) Fix for windows XP from https://rt.cpan.org/Public/Bug/Display.html?id=69113 */ #if((defined(_WIN32) || defined(WIN32)) && defined(_MSC_VER)) /* end fix */ #define snprintf sprintf_s #endif #define STATUS_CONTINUE 1 #define STATUS_ABORT 0 struct stack_elem_s; typedef struct stack_elem_s stack_elem_t; struct stack_elem_s { char * key; yajl_val value; stack_elem_t *next; }; struct context_s { stack_elem_t *stack; yajl_val root; char *errbuf; size_t errbuf_size; }; typedef struct context_s context_t; #define RETURN_ERROR(ctx,retval,...) 
{ \ if ((ctx)->errbuf != NULL) \ snprintf ((ctx)->errbuf, (ctx)->errbuf_size, __VA_ARGS__); \ return (retval); \ } static yajl_val value_alloc (yajl_type type) { yajl_val v; v = malloc (sizeof (*v)); if (v == NULL) return (NULL); memset (v, 0, sizeof (*v)); v->type = type; return (v); } static void yajl_object_free (yajl_val v) { size_t i; if (!YAJL_IS_OBJECT(v)) return; for (i = 0; i < v->u.object.len; i++) { free((char *) v->u.object.keys[i]); v->u.object.keys[i] = NULL; yajl_tree_free (v->u.object.values[i]); v->u.object.values[i] = NULL; } free((void*) v->u.object.keys); free(v->u.object.values); free(v); } static void yajl_array_free (yajl_val v) { size_t i; if (!YAJL_IS_ARRAY(v)) return; for (i = 0; i < v->u.array.len; i++) { yajl_tree_free (v->u.array.values[i]); v->u.array.values[i] = NULL; } free(v->u.array.values); free(v); } /* * Parsing nested objects and arrays is implemented using a stack. When a new * object or array starts (a curly or a square opening bracket is read), an * appropriate value is pushed on the stack. When the end of the object is * reached (an appropriate closing bracket has been read), the value is popped * off the stack and added to the enclosing object using "context_add_value". 
*/ static int context_push(context_t *ctx, yajl_val v) { stack_elem_t *stack; stack = malloc (sizeof (*stack)); if (stack == NULL) RETURN_ERROR (ctx, ENOMEM, "Out of memory"); memset (stack, 0, sizeof (*stack)); assert ((ctx->stack == NULL) || YAJL_IS_OBJECT (v) || YAJL_IS_ARRAY (v)); stack->value = v; stack->next = ctx->stack; ctx->stack = stack; return (0); } static yajl_val context_pop(context_t *ctx) { stack_elem_t *stack; yajl_val v; if (ctx->stack == NULL) RETURN_ERROR (ctx, NULL, "context_pop: " "Bottom of stack reached prematurely"); stack = ctx->stack; ctx->stack = stack->next; v = stack->value; free (stack); return (v); } static int object_add_keyval(context_t *ctx, yajl_val obj, char *key, yajl_val value) { const char **tmpk; yajl_val *tmpv; /* We're checking for NULL in "context_add_value" or its callers. */ assert (ctx != NULL); assert (obj != NULL); assert (key != NULL); assert (value != NULL); /* We're assuring that "obj" is an object in "context_add_value". */ assert(YAJL_IS_OBJECT(obj)); tmpk = realloc((void *) obj->u.object.keys, sizeof(*(obj->u.object.keys)) * (obj->u.object.len + 1)); if (tmpk == NULL) RETURN_ERROR(ctx, ENOMEM, "Out of memory"); obj->u.object.keys = tmpk; tmpv = realloc(obj->u.object.values, sizeof (*obj->u.object.values) * (obj->u.object.len + 1)); if (tmpv == NULL) RETURN_ERROR(ctx, ENOMEM, "Out of memory"); obj->u.object.values = tmpv; obj->u.object.keys[obj->u.object.len] = key; obj->u.object.values[obj->u.object.len] = value; obj->u.object.len++; return (0); } static int array_add_value (context_t *ctx, yajl_val array, yajl_val value) { yajl_val *tmp; /* We're checking for NULL pointers in "context_add_value" or its * callers. */ assert (ctx != NULL); assert (array != NULL); assert (value != NULL); /* "context_add_value" will only call us with array values. 
*/ assert(YAJL_IS_ARRAY(array)); tmp = realloc(array->u.array.values, sizeof(*(array->u.array.values)) * (array->u.array.len + 1)); if (tmp == NULL) RETURN_ERROR(ctx, ENOMEM, "Out of memory"); array->u.array.values = tmp; array->u.array.values[array->u.array.len] = value; array->u.array.len++; return 0; } /* * Add a value to the value on top of the stack or the "root" member in the * context if the end of the parsing process is reached. */ static int context_add_value (context_t *ctx, yajl_val v) { /* We're checking for NULL values in all the calling functions. */ assert (ctx != NULL); assert (v != NULL); /* * There are three valid states in which this function may be called: * - There is no value on the stack => This is the only value. This is the * last step done when parsing a document. We assign the value to the * "root" member and return. * - The value on the stack is an object. In this case store the key on the * stack or, if the key has already been read, add key and value to the * object. * - The value on the stack is an array. In this case simply add the value * and return. 
*/ if (ctx->stack == NULL) { assert (ctx->root == NULL); ctx->root = v; return (0); } else if (YAJL_IS_OBJECT (ctx->stack->value)) { if (ctx->stack->key == NULL) { if (!YAJL_IS_STRING (v)) RETURN_ERROR (ctx, EINVAL, "context_add_value: " "Object key is not a string (%#04x)", v->type); ctx->stack->key = v->u.string; v->u.string = NULL; free(v); return (0); } else /* if (ctx->key != NULL) */ { char * key; key = ctx->stack->key; ctx->stack->key = NULL; return (object_add_keyval (ctx, ctx->stack->value, key, v)); } } else if (YAJL_IS_ARRAY (ctx->stack->value)) { return (array_add_value (ctx, ctx->stack->value, v)); } else { RETURN_ERROR (ctx, EINVAL, "context_add_value: Cannot add value to " "a value of type %#04x (not a composite type)", ctx->stack->value->type); } } static int handle_string (void *ctx, const unsigned char *string, size_t string_length) { yajl_val v; v = value_alloc (yajl_t_string); if (v == NULL) RETURN_ERROR ((context_t *) ctx, STATUS_ABORT, "Out of memory"); v->u.string = malloc (string_length + 1); if (v->u.string == NULL) { free (v); RETURN_ERROR ((context_t *) ctx, STATUS_ABORT, "Out of memory"); } memcpy(v->u.string, string, string_length); v->u.string[string_length] = 0; return ((context_add_value (ctx, v) == 0) ? 
STATUS_CONTINUE : STATUS_ABORT); } static int handle_number (void *ctx, const char *string, size_t string_length) { yajl_val v; char *endptr; v = value_alloc(yajl_t_number); if (v == NULL) RETURN_ERROR((context_t *) ctx, STATUS_ABORT, "Out of memory"); v->u.number.r = malloc(string_length + 1); if (v->u.number.r == NULL) { free(v); RETURN_ERROR((context_t *) ctx, STATUS_ABORT, "Out of memory"); } memcpy(v->u.number.r, string, string_length); v->u.number.r[string_length] = 0; v->u.number.flags = 0; errno = 0; v->u.number.i = yajl_parse_integer((const unsigned char *) v->u.number.r, strlen(v->u.number.r)); if (errno == 0) v->u.number.flags |= YAJL_NUMBER_INT_VALID; endptr = NULL; errno = 0; v->u.number.d = strtod(v->u.number.r, &endptr); if ((errno == 0) && (endptr != NULL) && (*endptr == 0)) v->u.number.flags |= YAJL_NUMBER_DOUBLE_VALID; return ((context_add_value(ctx, v) == 0) ? STATUS_CONTINUE : STATUS_ABORT); } static int handle_start_map (void *ctx) { yajl_val v; v = value_alloc(yajl_t_object); if (v == NULL) RETURN_ERROR ((context_t *) ctx, STATUS_ABORT, "Out of memory"); v->u.object.keys = NULL; v->u.object.values = NULL; v->u.object.len = 0; return ((context_push (ctx, v) == 0) ? STATUS_CONTINUE : STATUS_ABORT); } static int handle_end_map (void *ctx) { yajl_val v; v = context_pop (ctx); if (v == NULL) return (STATUS_ABORT); return ((context_add_value (ctx, v) == 0) ? STATUS_CONTINUE : STATUS_ABORT); } static int handle_start_array (void *ctx) { yajl_val v; v = value_alloc(yajl_t_array); if (v == NULL) RETURN_ERROR ((context_t *) ctx, STATUS_ABORT, "Out of memory"); v->u.array.values = NULL; v->u.array.len = 0; return ((context_push (ctx, v) == 0) ? STATUS_CONTINUE : STATUS_ABORT); } static int handle_end_array (void *ctx) { yajl_val v; v = context_pop (ctx); if (v == NULL) return (STATUS_ABORT); return ((context_add_value (ctx, v) == 0) ? 
STATUS_CONTINUE : STATUS_ABORT); } static int handle_boolean (void *ctx, int boolean_value) { yajl_val v; v = value_alloc (boolean_value ? yajl_t_true : yajl_t_false); if (v == NULL) RETURN_ERROR ((context_t *) ctx, STATUS_ABORT, "Out of memory"); return ((context_add_value (ctx, v) == 0) ? STATUS_CONTINUE : STATUS_ABORT); } static int handle_null (void *ctx) { yajl_val v; v = value_alloc (yajl_t_null); if (v == NULL) RETURN_ERROR ((context_t *) ctx, STATUS_ABORT, "Out of memory"); return ((context_add_value (ctx, v) == 0) ? STATUS_CONTINUE : STATUS_ABORT); } /* * Public functions */ yajl_val yajl_tree_parse (const char *input, char *error_buffer, size_t error_buffer_size) { static const yajl_callbacks callbacks = { /* null = */ handle_null, /* boolean = */ handle_boolean, /* integer = */ NULL, /* double = */ NULL, /* number = */ handle_number, /* string = */ handle_string, /* start map = */ handle_start_map, /* map key = */ handle_string, /* end map = */ handle_end_map, /* start array = */ handle_start_array, /* end array = */ handle_end_array }; yajl_handle handle; yajl_status status; char * internal_err_str; context_t ctx = { NULL, NULL, NULL, 0 }; ctx.errbuf = error_buffer; ctx.errbuf_size = error_buffer_size; if (error_buffer != NULL) memset (error_buffer, 0, error_buffer_size); handle = yajl_alloc (&callbacks, NULL, &ctx); yajl_config(handle, yajl_allow_comments, 1); status = yajl_parse(handle, (unsigned char *) input, strlen (input)); //fix by jeroen if(status == yajl_status_ok){ status = yajl_complete_parse (handle); } //end of fix if (status != yajl_status_ok) { if (error_buffer != NULL && error_buffer_size > 0) { internal_err_str = (char *) yajl_get_error(handle, 1, (const unsigned char *) input, strlen(input)); snprintf(error_buffer, error_buffer_size, "%s", internal_err_str); YA_FREE(&(handle->alloc), internal_err_str); } yajl_free (handle); return NULL; } yajl_free (handle); return (ctx.root); } yajl_val yajl_tree_get(yajl_val n, const char ** path, 
yajl_type type) { if (!path) return NULL; while (n && *path) { size_t i; size_t len; if (n->type != yajl_t_object) return NULL; len = n->u.object.len; for (i = 0; i < len; i++) { if (!strcmp(*path, n->u.object.keys[i])) { n = n->u.object.values[i]; break; } } if (i == len) return NULL; path++; } if (n && type != yajl_t_any && type != n->type) n = NULL; return n; } void yajl_tree_free (yajl_val v) { if (v == NULL) return; if (YAJL_IS_STRING(v)) { free(v->u.string); free(v); } else if (YAJL_IS_NUMBER(v)) { free(v->u.number.r); free(v); } else if (YAJL_GET_OBJECT(v)) { yajl_object_free(v); } else if (YAJL_GET_ARRAY(v)) { yajl_array_free(v); } else /* if (yajl_t_true or yajl_t_false or yajl_t_null) */ { free(v); } } /* * Stuff below added by Jeroen to support push parsing over connection interface. */ yajl_handle push_parser_new () { /* init callback handlers */ yajl_callbacks *callbacks = malloc(sizeof(yajl_callbacks)); callbacks->yajl_null = handle_null; callbacks->yajl_boolean = handle_boolean; callbacks->yajl_number = handle_number; callbacks->yajl_integer = NULL; callbacks->yajl_double = NULL; callbacks->yajl_string = handle_string; callbacks->yajl_start_map = handle_start_map; callbacks->yajl_map_key = handle_string; callbacks->yajl_end_map = handle_end_map; callbacks->yajl_start_array = handle_start_array; callbacks->yajl_end_array = handle_end_array; /* init context */ context_t *ctx = malloc(sizeof(context_t)); ctx->root = NULL; ctx->stack = NULL; ctx->errbuf = malloc(1024); ctx->errbuf_size = 1024; /* init handle */ yajl_handle handle = yajl_alloc(callbacks, NULL, ctx); yajl_config(handle, yajl_allow_comments, 1); return handle; } yajl_val push_parser_get(yajl_handle handle){ context_t *ctx = (context_t*) handle->ctx; return ctx->root; } jsonlite/src/yajl/yajl_alloc.h0000644000176200001440000000234112626133701016112 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * 
purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /** * \file yajl_alloc.h * default memory allocation routines for yajl which use malloc/realloc and * free */ #ifndef __YAJL_ALLOC_H__ #define __YAJL_ALLOC_H__ #include "api/yajl_common.h" #define YA_MALLOC(afs, sz) (afs)->malloc((afs)->ctx, (sz)) #define YA_FREE(afs, ptr) (afs)->free((afs)->ctx, (ptr)) #define YA_REALLOC(afs, ptr, sz) (afs)->realloc((afs)->ctx, (ptr), (sz)) void yajl_set_default_alloc_funcs(yajl_alloc_funcs * yaf); #endif jsonlite/src/yajl/yajl_parser.c0000644000176200001440000005027412626133701016317 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ #include "api/yajl_parse.h" #include "yajl_lex.h" #include "yajl_parser.h" #include "yajl_encode.h" #include "yajl_bytestack.h" #include #include #include #include #include #include #include #include #define MAX_VALUE_TO_MULTIPLY ((LLONG_MAX / 10) + (LLONG_MAX % 10)) /* same semantics as strtol */ long long yajl_parse_integer(const unsigned char *number, unsigned int length) { long long ret = 0; long sign = 1; const unsigned char *pos = number; if (*pos == '-') { pos++; sign = -1; } if (*pos == '+') { pos++; } while (pos < number + length) { if ( ret > MAX_VALUE_TO_MULTIPLY ) { errno = ERANGE; return sign == 1 ? LLONG_MAX : LLONG_MIN; } ret *= 10; if (LLONG_MAX - ret < (*pos - '0')) { errno = ERANGE; return sign == 1 ? LLONG_MAX : LLONG_MIN; } if (*pos < '0' || *pos > '9') { errno = ERANGE; return sign == 1 ? LLONG_MAX : LLONG_MIN; } ret += (*pos++ - '0'); } return sign * ret; } unsigned char * yajl_render_error_string(yajl_handle hand, const unsigned char * jsonText, size_t jsonTextLen, int verbose) { size_t offset = hand->bytesConsumed; unsigned char * str; const char * errorType = NULL; const char * errorText = NULL; char text[72]; const char * arrow = " (right here) ------^\n"; if (yajl_bs_current(hand->stateStack) == yajl_state_parse_error) { errorType = "parse"; errorText = hand->parseError; } else if (yajl_bs_current(hand->stateStack) == yajl_state_lexical_error) { errorType = "lexical"; errorText = yajl_lex_error_to_string(yajl_lex_get_error(hand->lexer)); } else { errorType = "unknown"; } { size_t memneeded = 0; memneeded += strlen(errorType); memneeded += strlen(" error"); if (errorText != NULL) { memneeded += strlen(": "); memneeded += strlen(errorText); } str = (unsigned char *) YA_MALLOC(&(hand->alloc), memneeded + 2); if (!str) return NULL; str[0] = 0; strcat((char *) str, errorType); strcat((char *) str, " error"); if (errorText != NULL) { strcat((char *) str, ": "); strcat((char *) str, errorText); } strcat((char *) str, "\n"); } /* now we append 
as many spaces as needed to make sure the error * falls at char 41, if verbose was specified */ if (verbose) { size_t start, end, i; size_t spacesNeeded; spacesNeeded = (offset < 30 ? 40 - offset : 10); start = (offset >= 30 ? offset - 30 : 0); end = (offset + 30 > jsonTextLen ? jsonTextLen : offset + 30); for (i=0;ialloc), (unsigned int)(strlen((char *) str) + strlen((char *) text) + strlen(arrow) + 1)); if (newStr) { newStr[0] = 0; strcat((char *) newStr, (char *) str); strcat((char *) newStr, text); strcat((char *) newStr, arrow); } YA_FREE(&(hand->alloc), str); str = (unsigned char *) newStr; } } return str; } /* check for client cancelation */ #define _CC_CHK(x) \ if (!(x)) { \ yajl_bs_set(hand->stateStack, yajl_state_parse_error); \ hand->parseError = \ "client cancelled parse via callback return value"; \ return yajl_status_client_canceled; \ } yajl_status yajl_do_finish(yajl_handle hand) { yajl_status stat; stat = yajl_do_parse(hand,(const unsigned char *) " ",1); if (stat != yajl_status_ok) return stat; switch(yajl_bs_current(hand->stateStack)) { case yajl_state_parse_error: case yajl_state_lexical_error: return yajl_status_error; case yajl_state_got_value: case yajl_state_parse_complete: return yajl_status_ok; default: if (!(hand->flags & yajl_allow_partial_values)) { yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "premature EOF"; return yajl_status_error; } return yajl_status_ok; } } yajl_status yajl_do_parse(yajl_handle hand, const unsigned char * jsonText, size_t jsonTextLen) { yajl_tok tok; const unsigned char * buf; size_t bufLen; size_t * offset = &(hand->bytesConsumed); *offset = 0; around_again: switch (yajl_bs_current(hand->stateStack)) { case yajl_state_parse_complete: if (hand->flags & yajl_allow_multiple_values) { yajl_bs_set(hand->stateStack, yajl_state_got_value); goto around_again; } if (!(hand->flags & yajl_allow_trailing_garbage)) { if (*offset != jsonTextLen) { tok = yajl_lex_lex(hand->lexer, jsonText, 
jsonTextLen, offset, &buf, &bufLen); if (tok != yajl_tok_eof) { yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "trailing garbage"; } goto around_again; } } return yajl_status_ok; case yajl_state_lexical_error: case yajl_state_parse_error: return yajl_status_error; case yajl_state_start: case yajl_state_got_value: case yajl_state_map_need_val: case yajl_state_array_need_val: case yajl_state_array_start: { /* for arrays and maps, we advance the state for this * depth, then push the state of the next depth. * If an error occurs during the parsing of the nesting * enitity, the state at this level will not matter. * a state that needs pushing will be anything other * than state_start */ yajl_state stateToPush = yajl_state_start; tok = yajl_lex_lex(hand->lexer, jsonText, jsonTextLen, offset, &buf, &bufLen); switch (tok) { case yajl_tok_eof: return yajl_status_ok; case yajl_tok_error: yajl_bs_set(hand->stateStack, yajl_state_lexical_error); goto around_again; case yajl_tok_string: if (hand->callbacks && hand->callbacks->yajl_string) { _CC_CHK(hand->callbacks->yajl_string(hand->ctx, buf, bufLen)); } break; case yajl_tok_string_with_escapes: if (hand->callbacks && hand->callbacks->yajl_string) { yajl_buf_clear(hand->decodeBuf); yajl_string_decode(hand->decodeBuf, buf, bufLen); _CC_CHK(hand->callbacks->yajl_string( hand->ctx, yajl_buf_data(hand->decodeBuf), yajl_buf_len(hand->decodeBuf))); } break; case yajl_tok_bool: if (hand->callbacks && hand->callbacks->yajl_boolean) { _CC_CHK(hand->callbacks->yajl_boolean(hand->ctx, *buf == 't')); } break; case yajl_tok_null: if (hand->callbacks && hand->callbacks->yajl_null) { _CC_CHK(hand->callbacks->yajl_null(hand->ctx)); } break; case yajl_tok_left_bracket: if (hand->callbacks && hand->callbacks->yajl_start_map) { _CC_CHK(hand->callbacks->yajl_start_map(hand->ctx)); } stateToPush = yajl_state_map_start; break; case yajl_tok_left_brace: if (hand->callbacks && hand->callbacks->yajl_start_array) { 
_CC_CHK(hand->callbacks->yajl_start_array(hand->ctx)); } stateToPush = yajl_state_array_start; break; case yajl_tok_integer: if (hand->callbacks) { if (hand->callbacks->yajl_number) { _CC_CHK(hand->callbacks->yajl_number( hand->ctx,(const char *) buf, bufLen)); } else if (hand->callbacks->yajl_integer) { long long int i = 0; errno = 0; i = yajl_parse_integer(buf, bufLen); if ((i == LLONG_MIN || i == LLONG_MAX) && errno == ERANGE) { yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "integer overflow" ; /* try to restore error offset */ if (*offset >= bufLen) *offset -= bufLen; else *offset = 0; goto around_again; } _CC_CHK(hand->callbacks->yajl_integer(hand->ctx, i)); } } break; case yajl_tok_double: if (hand->callbacks) { if (hand->callbacks->yajl_number) { _CC_CHK(hand->callbacks->yajl_number( hand->ctx, (const char *) buf, bufLen)); } else if (hand->callbacks->yajl_double) { double d = 0.0; yajl_buf_clear(hand->decodeBuf); yajl_buf_append(hand->decodeBuf, buf, bufLen); buf = yajl_buf_data(hand->decodeBuf); errno = 0; d = strtod((char *) buf, NULL); if ((d == HUGE_VAL || d == -HUGE_VAL) && errno == ERANGE) { yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "numeric (floating point) " "overflow"; /* try to restore error offset */ if (*offset >= bufLen) *offset -= bufLen; else *offset = 0; goto around_again; } _CC_CHK(hand->callbacks->yajl_double(hand->ctx, d)); } } break; case yajl_tok_right_brace: { if (yajl_bs_current(hand->stateStack) == yajl_state_array_start) { if (hand->callbacks && hand->callbacks->yajl_end_array) { _CC_CHK(hand->callbacks->yajl_end_array(hand->ctx)); } yajl_bs_pop(hand->stateStack); goto around_again; } /* intentional fall-through */ } case yajl_tok_colon: case yajl_tok_comma: case yajl_tok_right_bracket: yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "unallowed token at this point in JSON text"; goto around_again; default: yajl_bs_set(hand->stateStack, 
yajl_state_parse_error); hand->parseError = "invalid token, internal error"; goto around_again; } /* got a value. transition depends on the state we're in. */ { yajl_state s = yajl_bs_current(hand->stateStack); if (s == yajl_state_start || s == yajl_state_got_value) { yajl_bs_set(hand->stateStack, yajl_state_parse_complete); } else if (s == yajl_state_map_need_val) { yajl_bs_set(hand->stateStack, yajl_state_map_got_val); } else { yajl_bs_set(hand->stateStack, yajl_state_array_got_val); } } if (stateToPush != yajl_state_start) { yajl_bs_push(hand->stateStack, stateToPush); } goto around_again; } case yajl_state_map_start: case yajl_state_map_need_key: { /* only difference between these two states is that in * start '}' is valid, whereas in need_key, we've parsed * a comma, and a string key _must_ follow */ tok = yajl_lex_lex(hand->lexer, jsonText, jsonTextLen, offset, &buf, &bufLen); switch (tok) { case yajl_tok_eof: return yajl_status_ok; case yajl_tok_error: yajl_bs_set(hand->stateStack, yajl_state_lexical_error); goto around_again; case yajl_tok_string_with_escapes: if (hand->callbacks && hand->callbacks->yajl_map_key) { yajl_buf_clear(hand->decodeBuf); yajl_string_decode(hand->decodeBuf, buf, bufLen); buf = yajl_buf_data(hand->decodeBuf); bufLen = yajl_buf_len(hand->decodeBuf); } /* intentional fall-through */ case yajl_tok_string: if (hand->callbacks && hand->callbacks->yajl_map_key) { _CC_CHK(hand->callbacks->yajl_map_key(hand->ctx, buf, bufLen)); } yajl_bs_set(hand->stateStack, yajl_state_map_sep); goto around_again; case yajl_tok_right_bracket: if (yajl_bs_current(hand->stateStack) == yajl_state_map_start) { if (hand->callbacks && hand->callbacks->yajl_end_map) { _CC_CHK(hand->callbacks->yajl_end_map(hand->ctx)); } yajl_bs_pop(hand->stateStack); goto around_again; } default: yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "invalid object key (must be a string)"; goto around_again; } } case yajl_state_map_sep: { tok = 
yajl_lex_lex(hand->lexer, jsonText, jsonTextLen, offset, &buf, &bufLen); switch (tok) { case yajl_tok_colon: yajl_bs_set(hand->stateStack, yajl_state_map_need_val); goto around_again; case yajl_tok_eof: return yajl_status_ok; case yajl_tok_error: yajl_bs_set(hand->stateStack, yajl_state_lexical_error); goto around_again; default: yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "object key and value must " "be separated by a colon (':')"; goto around_again; } } case yajl_state_map_got_val: { tok = yajl_lex_lex(hand->lexer, jsonText, jsonTextLen, offset, &buf, &bufLen); switch (tok) { case yajl_tok_right_bracket: if (hand->callbacks && hand->callbacks->yajl_end_map) { _CC_CHK(hand->callbacks->yajl_end_map(hand->ctx)); } yajl_bs_pop(hand->stateStack); goto around_again; case yajl_tok_comma: yajl_bs_set(hand->stateStack, yajl_state_map_need_key); goto around_again; case yajl_tok_eof: return yajl_status_ok; case yajl_tok_error: yajl_bs_set(hand->stateStack, yajl_state_lexical_error); goto around_again; default: yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "after key and value, inside map, " "I expect ',' or '}'"; /* try to restore error offset */ if (*offset >= bufLen) *offset -= bufLen; else *offset = 0; goto around_again; } } case yajl_state_array_got_val: { tok = yajl_lex_lex(hand->lexer, jsonText, jsonTextLen, offset, &buf, &bufLen); switch (tok) { case yajl_tok_right_brace: if (hand->callbacks && hand->callbacks->yajl_end_array) { _CC_CHK(hand->callbacks->yajl_end_array(hand->ctx)); } yajl_bs_pop(hand->stateStack); goto around_again; case yajl_tok_comma: yajl_bs_set(hand->stateStack, yajl_state_array_need_val); goto around_again; case yajl_tok_eof: return yajl_status_ok; case yajl_tok_error: yajl_bs_set(hand->stateStack, yajl_state_lexical_error); goto around_again; default: yajl_bs_set(hand->stateStack, yajl_state_parse_error); hand->parseError = "after array element, I expect ',' or ']'"; goto around_again; } 
} } //comment out by jeroen for R CMD check //abort(); return yajl_status_error; } jsonlite/src/yajl/yajl_alloc.c0000644000176200001440000000275012626133701016111 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /** * \file yajl_alloc.h * default memory allocation routines for yajl which use malloc/realloc and * free */ #include "yajl_alloc.h" #include static void * yajl_internal_malloc(void *ctx, size_t sz) { (void)ctx; return malloc(sz); } static void * yajl_internal_realloc(void *ctx, void * previous, size_t sz) { (void)ctx; return realloc(previous, sz); } static void yajl_internal_free(void *ctx, void * ptr) { (void)ctx; free(ptr); } void yajl_set_default_alloc_funcs(yajl_alloc_funcs * yaf) { yaf->malloc = yajl_internal_malloc; yaf->free = yajl_internal_free; yaf->realloc = yajl_internal_realloc; yaf->ctx = NULL; } jsonlite/src/yajl/yajl_version.c0000644000176200001440000000012112626133701016472 0ustar liggesusers#include int yajl_version(void) { return YAJL_VERSION; } jsonlite/src/yajl/yajl_lex.h0000644000176200001440000001012612626133701015610 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided 
that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #ifndef __YAJL_LEX_H__ #define __YAJL_LEX_H__ #include "api/yajl_common.h" typedef enum { yajl_tok_bool, yajl_tok_colon, yajl_tok_comma, yajl_tok_eof, yajl_tok_error, yajl_tok_left_brace, yajl_tok_left_bracket, yajl_tok_null, yajl_tok_right_brace, yajl_tok_right_bracket, /* we differentiate between integers and doubles to allow the * parser to interpret the number without re-scanning */ yajl_tok_integer, yajl_tok_double, /* we differentiate between strings which require further processing, * and strings that do not */ yajl_tok_string, yajl_tok_string_with_escapes, /* comment tokens are not currently returned to the parser, ever */ yajl_tok_comment } yajl_tok; typedef struct yajl_lexer_t * yajl_lexer; yajl_lexer yajl_lex_alloc(yajl_alloc_funcs * alloc, unsigned int allowComments, unsigned int validateUTF8); void yajl_lex_free(yajl_lexer lexer); /** * run/continue a lex. "offset" is an input/output parameter. * It should be initialized to zero for a * new chunk of target text, and upon subsetquent calls with the same * target text should passed with the value of the previous invocation. * * the client may be interested in the value of offset when an error is * returned from the lexer. This allows the client to render useful * error messages. * * When you pass the next chunk of data, context should be reinitialized * to zero. 
* * Finally, the output buffer is usually just a pointer into the jsonText, * however in cases where the entity being lexed spans multiple chunks, * the lexer will buffer the entity and the data returned will be * a pointer into that buffer. * * This behavior is abstracted from client code except for the performance * implications which require that the client choose a reasonable chunk * size to get adequate performance. */ yajl_tok yajl_lex_lex(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t * offset, const unsigned char ** outBuf, size_t * outLen); /** have a peek at the next token, but don't move the lexer forward */ yajl_tok yajl_lex_peek(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t offset); typedef enum { yajl_lex_e_ok = 0, yajl_lex_string_invalid_utf8, yajl_lex_string_invalid_escaped_char, yajl_lex_string_invalid_json_char, yajl_lex_string_invalid_hex_char, yajl_lex_invalid_char, yajl_lex_invalid_string, yajl_lex_missing_integer_after_decimal, yajl_lex_missing_integer_after_exponent, yajl_lex_missing_integer_after_minus, yajl_lex_unallowed_comment } yajl_lex_error; const char * yajl_lex_error_to_string(yajl_lex_error error); /** allows access to more specific information about the lexical * error when yajl_lex_lex returns yajl_tok_error. */ yajl_lex_error yajl_lex_get_error(yajl_lexer lexer); /** get the current offset into the most recently lexed json string. */ size_t yajl_lex_current_offset(yajl_lexer lexer); /** get the number of lines lexed by this lexer instance */ size_t yajl_lex_current_line(yajl_lexer lexer); /** get the number of chars lexed by this lexer instance since the last * \n or \r */ size_t yajl_lex_current_char(yajl_lexer lexer); #endif jsonlite/src/yajl/readme.txt0000644000176200001440000000112412540777273015641 0ustar liggesusersChanges in yajl code by Jeroen: - Manually changed the header include paths in some c/h files to avoid cmake dependency. 
- Comment out call to abort() in src/yajl/yajl_parser.c (for CMD check) - Manually generated yajl.version.h from yajl.version.h.in (by running cmake) - Patch for CMD check warnings on Windows: https://github.com/lloyd/yajl/issues/143 - Patch for error messages in yajl_tree_parse: https://github.com/lloyd/yajl/issues/144 - Fix for windows XP: https://rt.cpan.org/Public/Bug/Display.html?id=69113 - in yajl_tree.c added functions: push_parser_new and push_parser_get jsonlite/src/yajl/yajl.c0000644000176200001440000001152412626133701014736 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ #include "api/yajl_parse.h" #include "yajl_lex.h" #include "yajl_parser.h" #include "yajl_alloc.h" #include #include #include #include const char * yajl_status_to_string(yajl_status stat) { const char * statStr = "unknown"; switch (stat) { case yajl_status_ok: statStr = "ok, no error"; break; case yajl_status_client_canceled: statStr = "client canceled parse"; break; case yajl_status_error: statStr = "parse error"; break; } return statStr; } yajl_handle yajl_alloc(const yajl_callbacks * callbacks, yajl_alloc_funcs * afs, void * ctx) { yajl_handle hand = NULL; yajl_alloc_funcs afsBuffer; /* first order of business is to set up memory allocation routines */ if (afs != NULL) { if (afs->malloc == NULL || afs->realloc == NULL || afs->free == NULL) { return NULL; } } else { yajl_set_default_alloc_funcs(&afsBuffer); afs = &afsBuffer; } hand = (yajl_handle) YA_MALLOC(afs, sizeof(struct yajl_handle_t)); /* copy in pointers to allocation routines */ memcpy((void *) &(hand->alloc), (void *) afs, sizeof(yajl_alloc_funcs)); hand->callbacks = callbacks; hand->ctx = ctx; hand->lexer = NULL; hand->bytesConsumed = 0; hand->decodeBuf = yajl_buf_alloc(&(hand->alloc)); hand->flags = 0; yajl_bs_init(hand->stateStack, &(hand->alloc)); yajl_bs_push(hand->stateStack, yajl_state_start); return hand; } int yajl_config(yajl_handle h, yajl_option opt, ...) 
{ int rv = 1; va_list ap; va_start(ap, opt); switch(opt) { case yajl_allow_comments: case yajl_dont_validate_strings: case yajl_allow_trailing_garbage: case yajl_allow_multiple_values: case yajl_allow_partial_values: if (va_arg(ap, int)) h->flags |= opt; else h->flags &= ~opt; break; default: rv = 0; } va_end(ap); return rv; } void yajl_free(yajl_handle handle) { yajl_bs_free(handle->stateStack); yajl_buf_free(handle->decodeBuf); if (handle->lexer) { yajl_lex_free(handle->lexer); handle->lexer = NULL; } YA_FREE(&(handle->alloc), handle); } yajl_status yajl_parse(yajl_handle hand, const unsigned char * jsonText, size_t jsonTextLen) { yajl_status status; /* lazy allocation of the lexer */ if (hand->lexer == NULL) { hand->lexer = yajl_lex_alloc(&(hand->alloc), hand->flags & yajl_allow_comments, !(hand->flags & yajl_dont_validate_strings)); } status = yajl_do_parse(hand, jsonText, jsonTextLen); return status; } yajl_status yajl_complete_parse(yajl_handle hand) { /* The lexer is lazy allocated in the first call to parse. if parse is * never called, then no data was provided to parse at all. This is a * "premature EOF" error unless yajl_allow_partial_values is specified. * allocating the lexer now is the simplest possible way to handle this * case while preserving all the other semantics of the parser * (multiple values, partial values, etc). 
*/ if (hand->lexer == NULL) { hand->lexer = yajl_lex_alloc(&(hand->alloc), hand->flags & yajl_allow_comments, !(hand->flags & yajl_dont_validate_strings)); } return yajl_do_finish(hand); } unsigned char * yajl_get_error(yajl_handle hand, int verbose, const unsigned char * jsonText, size_t jsonTextLen) { return yajl_render_error_string(hand, jsonText, jsonTextLen, verbose); } size_t yajl_get_bytes_consumed(yajl_handle hand) { if (!hand) return 0; else return hand->bytesConsumed; } void yajl_free_error(yajl_handle hand, unsigned char * str) { /* use memory allocation functions if set */ YA_FREE(&(hand->alloc), str); } /* XXX: add utility routines to parse from file */ jsonlite/src/yajl/api/0000755000176200001440000000000012540777273014416 5ustar liggesusersjsonlite/src/yajl/api/yajl_gen.h0000644000176200001440000001604612626133701016351 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /** * \file yajl_gen.h * Interface to YAJL's JSON generation facilities. 
*/ #include #ifndef __YAJL_GEN_H__ #define __YAJL_GEN_H__ #include #ifdef __cplusplus extern "C" { #endif /** generator status codes */ typedef enum { /** no error */ yajl_gen_status_ok = 0, /** at a point where a map key is generated, a function other than * yajl_gen_string was called */ yajl_gen_keys_must_be_strings, /** YAJL's maximum generation depth was exceeded. see * YAJL_MAX_DEPTH */ yajl_max_depth_exceeded, /** A generator function (yajl_gen_XXX) was called while in an error * state */ yajl_gen_in_error_state, /** A complete JSON document has been generated */ yajl_gen_generation_complete, /** yajl_gen_double was passed an invalid floating point value * (infinity or NaN). */ yajl_gen_invalid_number, /** A print callback was passed in, so there is no internal * buffer to get from */ yajl_gen_no_buf, /** returned from yajl_gen_string() when the yajl_gen_validate_utf8 * option is enabled and an invalid was passed by client code. */ yajl_gen_invalid_string } yajl_gen_status; /** an opaque handle to a generator */ typedef struct yajl_gen_t * yajl_gen; /** a callback used for "printing" the results. */ typedef void (*yajl_print_t)(void * ctx, const char * str, size_t len); /** configuration parameters for the parser, these may be passed to * yajl_gen_config() along with option specific argument(s). In general, * all configuration parameters default to *off*. */ typedef enum { /** generate indented (beautiful) output */ yajl_gen_beautify = 0x01, /** * Set an indent string which is used when yajl_gen_beautify * is enabled. Maybe something like \\t or some number of * spaces. The default is four spaces ' '. */ yajl_gen_indent_string = 0x02, /** * Set a function and context argument that should be used to * output generated json. the function should conform to the * yajl_print_t prototype while the context argument is a * void * of your choosing. 
* * example: * yajl_gen_config(g, yajl_gen_print_callback, myFunc, myVoidPtr); */ yajl_gen_print_callback = 0x04, /** * Normally the generator does not validate that strings you * pass to it via yajl_gen_string() are valid UTF8. Enabling * this option will cause it to do so. */ yajl_gen_validate_utf8 = 0x08, /** * the forward solidus (slash or '/' in human) is not required to be * escaped in json text. By default, YAJL will not escape it in the * iterest of saving bytes. Setting this flag will cause YAJL to * always escape '/' in generated JSON strings. */ yajl_gen_escape_solidus = 0x10 } yajl_gen_option; /** allow the modification of generator options subsequent to handle * allocation (via yajl_alloc) * \returns zero in case of errors, non-zero otherwise */ YAJL_API int yajl_gen_config(yajl_gen g, yajl_gen_option opt, ...); /** allocate a generator handle * \param allocFuncs an optional pointer to a structure which allows * the client to overide the memory allocation * used by yajl. May be NULL, in which case * malloc/free/realloc will be used. * * \returns an allocated handle on success, NULL on failure (bad params) */ YAJL_API yajl_gen yajl_gen_alloc(const yajl_alloc_funcs * allocFuncs); /** free a generator handle */ YAJL_API void yajl_gen_free(yajl_gen handle); YAJL_API yajl_gen_status yajl_gen_integer(yajl_gen hand, long long int number); /** generate a floating point number. number may not be infinity or * NaN, as these have no representation in JSON. 
In these cases the * generator will return 'yajl_gen_invalid_number' */ YAJL_API yajl_gen_status yajl_gen_double(yajl_gen hand, double number); YAJL_API yajl_gen_status yajl_gen_number(yajl_gen hand, const char * num, size_t len); YAJL_API yajl_gen_status yajl_gen_string(yajl_gen hand, const unsigned char * str, size_t len); YAJL_API yajl_gen_status yajl_gen_null(yajl_gen hand); YAJL_API yajl_gen_status yajl_gen_bool(yajl_gen hand, int boolean); YAJL_API yajl_gen_status yajl_gen_map_open(yajl_gen hand); YAJL_API yajl_gen_status yajl_gen_map_close(yajl_gen hand); YAJL_API yajl_gen_status yajl_gen_array_open(yajl_gen hand); YAJL_API yajl_gen_status yajl_gen_array_close(yajl_gen hand); /** access the null terminated generator buffer. If incrementally * outputing JSON, one should call yajl_gen_clear to clear the * buffer. This allows stream generation. */ YAJL_API yajl_gen_status yajl_gen_get_buf(yajl_gen hand, const unsigned char ** buf, size_t * len); /** clear yajl's output buffer, but maintain all internal generation * state. This function will not "reset" the generator state, and is * intended to enable incremental JSON outputing. */ YAJL_API void yajl_gen_clear(yajl_gen hand); /** Reset the generator state. Allows a client to generate multiple * json entities in a stream. The "sep" string will be inserted to * separate the previously generated entity from the current, * NULL means *no separation* of entites (clients beware, generating * multiple JSON numbers without a separator, for instance, will result in ambiguous output) * * Note: this call will not clear yajl's output buffer. 
This * may be accomplished explicitly by calling yajl_gen_clear() */ YAJL_API void yajl_gen_reset(yajl_gen hand, const char * sep); #ifdef __cplusplus } #endif #endif jsonlite/src/yajl/api/yajl_version.h0000644000176200001440000000054612626133701017263 0ustar liggesusers#ifndef YAJL_VERSION_H_ #define YAJL_VERSION_H_ #include #define YAJL_MAJOR 2 #define YAJL_MINOR 1 #define YAJL_MICRO 1 #define YAJL_VERSION ((YAJL_MAJOR * 10000) + (YAJL_MINOR * 100) + YAJL_MICRO) #ifdef __cplusplus extern "C" { #endif extern int YAJL_API yajl_version(void); #ifdef __cplusplus } #endif #endif /* YAJL_VERSION_H_ */ jsonlite/src/yajl/api/yajl_parse.h0000644000176200001440000002312712626133701016710 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /** * \file yajl_parse.h * Interface to YAJL's JSON stream parsing facilities. */ #include #ifndef __YAJL_PARSE_H__ #define __YAJL_PARSE_H__ #include #ifdef __cplusplus extern "C" { #endif /** error codes returned from this interface */ typedef enum { /** no error was encountered */ yajl_status_ok, /** a client callback returned zero, stopping the parse */ yajl_status_client_canceled, /** An error occured during the parse. 
Call yajl_get_error for * more information about the encountered error */ yajl_status_error } yajl_status; /** attain a human readable, english, string for an error */ YAJL_API const char * yajl_status_to_string(yajl_status code); /** an opaque handle to a parser */ typedef struct yajl_handle_t * yajl_handle; /** yajl is an event driven parser. this means as json elements are * parsed, you are called back to do something with the data. The * functions in this table indicate the various events for which * you will be called back. Each callback accepts a "context" * pointer, this is a void * that is passed into the yajl_parse * function which the client code may use to pass around context. * * All callbacks return an integer. If non-zero, the parse will * continue. If zero, the parse will be canceled and * yajl_status_client_canceled will be returned from the parse. * * \attention { * A note about the handling of numbers: * * yajl will only convert numbers that can be represented in a * double or a 64 bit (long long) int. All other numbers will * be passed to the client in string form using the yajl_number * callback. Furthermore, if yajl_number is not NULL, it will * always be used to return numbers, that is yajl_integer and * yajl_double will be ignored. If yajl_number is NULL but one * of yajl_integer or yajl_double are defined, parsing of a * number larger than is representable in a double or 64 bit * integer will result in a parse error. * } */ typedef struct { int (* yajl_null)(void * ctx); int (* yajl_boolean)(void * ctx, int boolVal); int (* yajl_integer)(void * ctx, long long integerVal); int (* yajl_double)(void * ctx, double doubleVal); /** A callback which passes the string representation of the number * back to the client. 
Will be used for all numbers when present */ int (* yajl_number)(void * ctx, const char * numberVal, size_t numberLen); /** strings are returned as pointers into the JSON text when, * possible, as a result, they are _not_ null padded */ int (* yajl_string)(void * ctx, const unsigned char * stringVal, size_t stringLen); int (* yajl_start_map)(void * ctx); int (* yajl_map_key)(void * ctx, const unsigned char * key, size_t stringLen); int (* yajl_end_map)(void * ctx); int (* yajl_start_array)(void * ctx); int (* yajl_end_array)(void * ctx); } yajl_callbacks; /** allocate a parser handle * \param callbacks a yajl callbacks structure specifying the * functions to call when different JSON entities * are encountered in the input text. May be NULL, * which is only useful for validation. * \param afs memory allocation functions, may be NULL for to use * C runtime library routines (malloc and friends) * \param ctx a context pointer that will be passed to callbacks. */ YAJL_API yajl_handle yajl_alloc(const yajl_callbacks * callbacks, yajl_alloc_funcs * afs, void * ctx); /** configuration parameters for the parser, these may be passed to * yajl_config() along with option specific argument(s). In general, * all configuration parameters default to *off*. */ typedef enum { /** Ignore javascript style comments present in * JSON input. Non-standard, but rather fun * arguments: toggled off with integer zero, on otherwise. * * example: * yajl_config(h, yajl_allow_comments, 1); // turn comment support on */ yajl_allow_comments = 0x01, /** * When set the parser will verify that all strings in JSON input are * valid UTF8 and will emit a parse error if this is not so. 
When set, * this option makes parsing slightly more expensive (~7% depending * on processor and compiler in use) * * example: * yajl_config(h, yajl_dont_validate_strings, 1); // disable utf8 checking */ yajl_dont_validate_strings = 0x02, /** * By default, upon calls to yajl_complete_parse(), yajl will * ensure the entire input text was consumed and will raise an error * otherwise. Enabling this flag will cause yajl to disable this * check. This can be useful when parsing json out of a that contains more * than a single JSON document. */ yajl_allow_trailing_garbage = 0x04, /** * Allow multiple values to be parsed by a single handle. The * entire text must be valid JSON, and values can be seperated * by any kind of whitespace. This flag will change the * behavior of the parser, and cause it continue parsing after * a value is parsed, rather than transitioning into a * complete state. This option can be useful when parsing multiple * values from an input stream. */ yajl_allow_multiple_values = 0x08, /** * When yajl_complete_parse() is called the parser will * check that the top level value was completely consumed. I.E., * if called whilst in the middle of parsing a value * yajl will enter an error state (premature EOF). Setting this * flag suppresses that check and the corresponding error. */ yajl_allow_partial_values = 0x10 } yajl_option; /** allow the modification of parser options subsequent to handle * allocation (via yajl_alloc) * \returns zero in case of errors, non-zero otherwise */ YAJL_API int yajl_config(yajl_handle h, yajl_option opt, ...); /** free a parser handle */ YAJL_API void yajl_free(yajl_handle handle); /** Parse some json! 
* \param hand - a handle to the json parser allocated with yajl_alloc * \param jsonText - a pointer to the UTF8 json text to be parsed * \param jsonTextLength - the length, in bytes, of input text */ YAJL_API yajl_status yajl_parse(yajl_handle hand, const unsigned char * jsonText, size_t jsonTextLength); /** Parse any remaining buffered json. * Since yajl is a stream-based parser, without an explicit end of * input, yajl sometimes can't decide if content at the end of the * stream is valid or not. For example, if "1" has been fed in, * yajl can't know whether another digit is next or some character * that would terminate the integer token. * * \param hand - a handle to the json parser allocated with yajl_alloc */ YAJL_API yajl_status yajl_complete_parse(yajl_handle hand); /** get an error string describing the state of the * parse. * * If verbose is non-zero, the message will include the JSON * text where the error occured, along with an arrow pointing to * the specific char. * * \returns A dynamically allocated string will be returned which should * be freed with yajl_free_error */ YAJL_API unsigned char * yajl_get_error(yajl_handle hand, int verbose, const unsigned char * jsonText, size_t jsonTextLength); /** * get the amount of data consumed from the last chunk passed to YAJL. * * In the case of a successful parse this can help you understand if * the entire buffer was consumed (which will allow you to handle * "junk at end of input"). * * In the event an error is encountered during parsing, this function * affords the client a way to get the offset into the most recent * chunk where the error occured. 0 will be returned if no error * was encountered. 
*/ YAJL_API size_t yajl_get_bytes_consumed(yajl_handle hand); /** free an error returned from yajl_get_error */ YAJL_API void yajl_free_error(yajl_handle hand, unsigned char * str); #ifdef __cplusplus } #endif #endif jsonlite/src/yajl/api/yajl_common.h0000644000176200001440000000501412626133701017061 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #ifndef __YAJL_COMMON_H__ #define __YAJL_COMMON_H__ #include #ifdef __cplusplus extern "C" { #endif #define YAJL_MAX_DEPTH 128 /* msft dll export gunk. To build a DLL on windows, you * must define WIN32, YAJL_SHARED, and YAJL_BUILD. 
To use a shared * DLL, you must define YAJL_SHARED and WIN32 */ #if (defined(_WIN32) || defined(WIN32)) && defined(YAJL_SHARED) # ifdef YAJL_BUILD # define YAJL_API __declspec(dllexport) # else # define YAJL_API __declspec(dllimport) # endif #else # if defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__) >= 303 # define YAJL_API __attribute__ ((visibility("default"))) # else # define YAJL_API # endif #endif /** pointer to a malloc function, supporting client overriding memory * allocation routines */ typedef void * (*yajl_malloc_func)(void *ctx, size_t sz); /** pointer to a free function, supporting client overriding memory * allocation routines */ typedef void (*yajl_free_func)(void *ctx, void * ptr); /** pointer to a realloc function which can resize an allocation. */ typedef void * (*yajl_realloc_func)(void *ctx, void * ptr, size_t sz); /** A structure which can be passed to yajl_*_alloc routines to allow the * client to specify memory allocation functions to be used. */ typedef struct { /** pointer to a function that can allocate uninitialized memory */ yajl_malloc_func malloc; /** pointer to a function that can resize memory allocations */ yajl_realloc_func realloc; /** pointer to a function that can free memory allocated using * reallocFunction or mallocFunction */ yajl_free_func free; /** a context pointer that will be passed to above allocation routines */ void * ctx; } yajl_alloc_funcs; #ifdef __cplusplus } #endif #endif jsonlite/src/yajl/api/yajl_tree.h0000644000176200001440000001577612626133701016550 0ustar liggesusers/* * Copyright (c) 2010-2011 Florian Forster * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /** * \file yajl_tree.h * * Parses JSON data and returns the data in tree form. * * \author Florian Forster * \date August 2010 * * This interface makes quick parsing and extraction of * smallish JSON docs trivial: * * \include example/parse_config.c */ #ifndef YAJL_TREE_H #define YAJL_TREE_H 1 #include #ifdef __cplusplus extern "C" { #endif /** possible data types that a yajl_val_s can hold */ typedef enum { yajl_t_string = 1, yajl_t_number = 2, yajl_t_object = 3, yajl_t_array = 4, yajl_t_true = 5, yajl_t_false = 6, yajl_t_null = 7, /** The any type isn't valid for yajl_val_s.type, but can be * used as an argument to routines like yajl_tree_get(). */ yajl_t_any = 8 } yajl_type; #define YAJL_NUMBER_INT_VALID 0x01 #define YAJL_NUMBER_DOUBLE_VALID 0x02 /** A pointer to a node in the parse tree */ typedef struct yajl_val_s * yajl_val; /** * A JSON value representation capable of holding one of the seven * types above. For "string", "number", "object", and "array" * additional data is available in the union. The "YAJL_IS_*" * and "YAJL_GET_*" macros below allow type checking and convenient * value extraction. */ struct yajl_val_s { /** Type of the value contained. Use the "YAJL_IS_*" macros to check for a * specific type. */ yajl_type type; /** Type-specific data. You may use the "YAJL_GET_*" macros to access these * members. */ union { char * string; struct { long long i; /*< integer value, if representable. */ double d; /*< double value, if representable. */ char *r; /*< unparsed number in string form. */ /** Signals whether the \em i and \em d members are * valid. See \c YAJL_NUMBER_INT_VALID and * \c YAJL_NUMBER_DOUBLE_VALID. 
*/ unsigned int flags; } number; struct { const char **keys; /*< Array of keys */ yajl_val *values; /*< Array of values. */ size_t len; /*< Number of key-value-pairs. */ } object; struct { yajl_val *values; /*< Array of elements. */ size_t len; /*< Number of elements. */ } array; } u; }; /** * Parse a string. * * Parses an null-terminated string containing JSON data and returns a pointer * to the top-level value (root of the parse tree). * * \param input Pointer to a null-terminated utf8 string containing * JSON data. * \param error_buffer Pointer to a buffer in which an error message will * be stored if \em yajl_tree_parse fails, or * \c NULL. The buffer will be initialized before * parsing, so its content will be destroyed even if * \em yajl_tree_parse succeeds. * \param error_buffer_size Size of the memory area pointed to by * \em error_buffer_size. If \em error_buffer_size is * \c NULL, this argument is ignored. * * \returns Pointer to the top-level value or \c NULL on error. The memory * pointed to must be freed using \em yajl_tree_free. In case of an error, a * null terminated message describing the error in more detail is stored in * \em error_buffer if it is not \c NULL. */ YAJL_API yajl_val yajl_tree_parse (const char *input, char *error_buffer, size_t error_buffer_size); /** * Free a parse tree returned by "yajl_tree_parse". * * \param v Pointer to a JSON value returned by "yajl_tree_parse". Passing NULL * is valid and results in a no-op. */ YAJL_API void yajl_tree_free (yajl_val v); /** * Access a nested value inside a tree. * * \param parent the node under which you'd like to extract values. * \param path A null terminated array of strings, each the name of an object key * \param type the yajl_type of the object you seek, or yajl_t_any if any will do. * * \returns a pointer to the found value, or NULL if we came up empty. 
* * Future Ideas: it'd be nice to move path to a string and implement support for * a teeny tiny micro language here, so you can extract array elements, do things * like .first and .last, even .length. Inspiration from JSONPath and css selectors? * No it wouldn't be fast, but that's not what this API is about. */ YAJL_API yajl_val yajl_tree_get(yajl_val parent, const char ** path, yajl_type type); /* Various convenience macros to check the type of a `yajl_val` */ #define YAJL_IS_STRING(v) (((v) != NULL) && ((v)->type == yajl_t_string)) #define YAJL_IS_NUMBER(v) (((v) != NULL) && ((v)->type == yajl_t_number)) #define YAJL_IS_INTEGER(v) (YAJL_IS_NUMBER(v) && ((v)->u.number.flags & YAJL_NUMBER_INT_VALID)) #define YAJL_IS_DOUBLE(v) (YAJL_IS_NUMBER(v) && ((v)->u.number.flags & YAJL_NUMBER_DOUBLE_VALID)) #define YAJL_IS_OBJECT(v) (((v) != NULL) && ((v)->type == yajl_t_object)) #define YAJL_IS_ARRAY(v) (((v) != NULL) && ((v)->type == yajl_t_array )) #define YAJL_IS_TRUE(v) (((v) != NULL) && ((v)->type == yajl_t_true )) #define YAJL_IS_FALSE(v) (((v) != NULL) && ((v)->type == yajl_t_false )) #define YAJL_IS_NULL(v) (((v) != NULL) && ((v)->type == yajl_t_null )) /** Given a yajl_val_string return a ptr to the bare string it contains, * or NULL if the value is not a string. */ #define YAJL_GET_STRING(v) (YAJL_IS_STRING(v) ? (v)->u.string : NULL) /** Get the string representation of a number. You should check type first, * perhaps using YAJL_IS_NUMBER */ #define YAJL_GET_NUMBER(v) ((v)->u.number.r) /** Get the double representation of a number. You should check type first, * perhaps using YAJL_IS_DOUBLE */ #define YAJL_GET_DOUBLE(v) ((v)->u.number.d) /** Get the 64bit (long long) integer representation of a number. You should * check type first, perhaps using YAJL_IS_INTEGER */ #define YAJL_GET_INTEGER(v) ((v)->u.number.i) /** Get a pointer to a yajl_val_object or NULL if the value is not an object. */ #define YAJL_GET_OBJECT(v) (YAJL_IS_OBJECT(v) ? 
&(v)->u.object : NULL) /** Get a pointer to a yajl_val_array or NULL if the value is not an object. */ #define YAJL_GET_ARRAY(v) (YAJL_IS_ARRAY(v) ? &(v)->u.array : NULL) #ifdef __cplusplus } #endif #endif /* YAJL_TREE_H */ jsonlite/src/yajl/yajl_buf.h0000644000176200001440000000350312626133701015575 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #ifndef __YAJL_BUF_H__ #define __YAJL_BUF_H__ #include "api/yajl_common.h" #include "yajl_alloc.h" /* * Implementation/performance notes. If this were moved to a header * only implementation using #define's where possible we might be * able to sqeeze a little performance out of the guy by killing function * call overhead. YMMV. */ /** * yajl_buf is a buffer with exponential growth. the buffer ensures that * you are always null padded. 
*/ typedef struct yajl_buf_t * yajl_buf; /* allocate a new buffer */ yajl_buf yajl_buf_alloc(yajl_alloc_funcs * alloc); /* free the buffer */ void yajl_buf_free(yajl_buf buf); /* append a number of bytes to the buffer */ void yajl_buf_append(yajl_buf buf, const void * data, size_t len); /* empty the buffer */ void yajl_buf_clear(yajl_buf buf); /* get a pointer to the beginning of the buffer */ const unsigned char * yajl_buf_data(yajl_buf buf); /* get the length of the buffer */ size_t yajl_buf_len(yajl_buf buf); /* truncate the buffer */ void yajl_buf_truncate(yajl_buf buf, size_t len); #endif jsonlite/src/yajl/yajl_parser.h0000644000176200001440000000470012626133701016315 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ #ifndef __YAJL_PARSER_H__ #define __YAJL_PARSER_H__ #include "api/yajl_parse.h" #include "yajl_bytestack.h" #include "yajl_buf.h" #include "yajl_lex.h" typedef enum { yajl_state_start = 0, yajl_state_parse_complete, yajl_state_parse_error, yajl_state_lexical_error, yajl_state_map_start, yajl_state_map_sep, yajl_state_map_need_val, yajl_state_map_got_val, yajl_state_map_need_key, yajl_state_array_start, yajl_state_array_got_val, yajl_state_array_need_val, yajl_state_got_value, } yajl_state; struct yajl_handle_t { const yajl_callbacks * callbacks; void * ctx; yajl_lexer lexer; const char * parseError; /* the number of bytes consumed from the last client buffer, * in the case of an error this will be an error offset, in the * case of an error this can be used as the error offset */ size_t bytesConsumed; /* temporary storage for decoded strings */ yajl_buf decodeBuf; /* a stack of states. access with yajl_state_XXX routines */ yajl_bytestack stateStack; /* memory allocation routines */ yajl_alloc_funcs alloc; /* bitfield */ unsigned int flags; }; yajl_status yajl_do_parse(yajl_handle handle, const unsigned char * jsonText, size_t jsonTextLen); yajl_status yajl_do_finish(yajl_handle handle); unsigned char * yajl_render_error_string(yajl_handle hand, const unsigned char * jsonText, size_t jsonTextLen, int verbose); /* A little built in integer parsing routine with the same semantics as strtol * that's unaffected by LOCALE. */ long long yajl_parse_integer(const unsigned char *number, unsigned int length); #endif jsonlite/src/yajl/yajl_lex.c0000644000176200001440000006313012626133701015606 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. 
* * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ #include "yajl_lex.h" #include "yajl_buf.h" #include #include #include #include #ifdef YAJL_LEXER_DEBUG static const char * tokToStr(yajl_tok tok) { switch (tok) { case yajl_tok_bool: return "bool"; case yajl_tok_colon: return "colon"; case yajl_tok_comma: return "comma"; case yajl_tok_eof: return "eof"; case yajl_tok_error: return "error"; case yajl_tok_left_brace: return "brace"; case yajl_tok_left_bracket: return "bracket"; case yajl_tok_null: return "null"; case yajl_tok_integer: return "integer"; case yajl_tok_double: return "double"; case yajl_tok_right_brace: return "brace"; case yajl_tok_right_bracket: return "bracket"; case yajl_tok_string: return "string"; case yajl_tok_string_with_escapes: return "string_with_escapes"; } return "unknown"; } #endif /* Impact of the stream parsing feature on the lexer: * * YAJL support stream parsing. That is, the ability to parse the first * bits of a chunk of JSON before the last bits are available (still on * the network or disk). This makes the lexer more complex. The * responsibility of the lexer is to handle transparently the case where * a chunk boundary falls in the middle of a token. This is * accomplished is via a buffer and a character reading abstraction. * * Overview of implementation * * When we lex to end of input string before end of token is hit, we * copy all of the input text composing the token into our lexBuf. * * Every time we read a character, we do so through the readChar function. 
* readChar's responsibility is to handle pulling all chars from the buffer * before pulling chars from input text */ struct yajl_lexer_t { /* the overal line and char offset into the data */ size_t lineOff; size_t charOff; /* error */ yajl_lex_error error; /* a input buffer to handle the case where a token is spread over * multiple chunks */ yajl_buf buf; /* in the case where we have data in the lexBuf, bufOff holds * the current offset into the lexBuf. */ size_t bufOff; /* are we using the lex buf? */ unsigned int bufInUse; /* shall we allow comments? */ unsigned int allowComments; /* shall we validate utf8 inside strings? */ unsigned int validateUTF8; yajl_alloc_funcs * alloc; }; #define readChar(lxr, txt, off) \ (((lxr)->bufInUse && yajl_buf_len((lxr)->buf) && lxr->bufOff < yajl_buf_len((lxr)->buf)) ? \ (*((const unsigned char *) yajl_buf_data((lxr)->buf) + ((lxr)->bufOff)++)) : \ ((txt)[(*(off))++])) #define unreadChar(lxr, off) ((*(off) > 0) ? (*(off))-- : ((lxr)->bufOff--)) yajl_lexer yajl_lex_alloc(yajl_alloc_funcs * alloc, unsigned int allowComments, unsigned int validateUTF8) { yajl_lexer lxr = (yajl_lexer) YA_MALLOC(alloc, sizeof(struct yajl_lexer_t)); memset((void *) lxr, 0, sizeof(struct yajl_lexer_t)); lxr->buf = yajl_buf_alloc(alloc); lxr->allowComments = allowComments; lxr->validateUTF8 = validateUTF8; lxr->alloc = alloc; return lxr; } void yajl_lex_free(yajl_lexer lxr) { yajl_buf_free(lxr->buf); YA_FREE(lxr->alloc, lxr); return; } /* a lookup table which lets us quickly determine three things: * VEC - valid escaped control char * note. the solidus '/' may be escaped or not. 
* IJC - invalid json char * VHC - valid hex char * NFP - needs further processing (from a string scanning perspective) * NUC - needs utf8 checking when enabled (from a string scanning perspective) */ #define VEC 0x01 #define IJC 0x02 #define VHC 0x04 #define NFP 0x08 #define NUC 0x10 static const char charLookupTable[256] = { /*00*/ IJC , IJC , IJC , IJC , IJC , IJC , IJC , IJC , /*08*/ IJC , IJC , IJC , IJC , IJC , IJC , IJC , IJC , /*10*/ IJC , IJC , IJC , IJC , IJC , IJC , IJC , IJC , /*18*/ IJC , IJC , IJC , IJC , IJC , IJC , IJC , IJC , /*20*/ 0 , 0 , NFP|VEC|IJC, 0 , 0 , 0 , 0 , 0 , /*28*/ 0 , 0 , 0 , 0 , 0 , 0 , 0 , VEC , /*30*/ VHC , VHC , VHC , VHC , VHC , VHC , VHC , VHC , /*38*/ VHC , VHC , 0 , 0 , 0 , 0 , 0 , 0 , /*40*/ 0 , VHC , VHC , VHC , VHC , VHC , VHC , 0 , /*48*/ 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , /*50*/ 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , /*58*/ 0 , 0 , 0 , 0 , NFP|VEC|IJC, 0 , 0 , 0 , /*60*/ 0 , VHC , VEC|VHC, VHC , VHC , VHC , VEC|VHC, 0 , /*68*/ 0 , 0 , 0 , 0 , 0 , 0 , VEC , 0 , /*70*/ 0 , 0 , VEC , 0 , VEC , 0 , 0 , 0 , /*78*/ 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC , NUC }; /** process a variable length utf8 encoded codepoint. 
* * returns: * yajl_tok_string - if valid utf8 char was parsed and offset was * advanced * yajl_tok_eof - if end of input was hit before validation could * complete * yajl_tok_error - if invalid utf8 was encountered * * NOTE: on error the offset will point to the first char of the * invalid utf8 */ #define UTF8_CHECK_EOF if (*offset >= jsonTextLen) { return yajl_tok_eof; } static yajl_tok yajl_lex_utf8_char(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t * offset, unsigned char curChar) { if (curChar <= 0x7f) { /* single byte */ return yajl_tok_string; } else if ((curChar >> 5) == 0x6) { /* two byte */ UTF8_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if ((curChar >> 6) == 0x2) return yajl_tok_string; } else if ((curChar >> 4) == 0x0e) { /* three byte */ UTF8_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if ((curChar >> 6) == 0x2) { UTF8_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if ((curChar >> 6) == 0x2) return yajl_tok_string; } } else if ((curChar >> 3) == 0x1e) { /* four byte */ UTF8_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if ((curChar >> 6) == 0x2) { UTF8_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if ((curChar >> 6) == 0x2) { UTF8_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if ((curChar >> 6) == 0x2) return yajl_tok_string; } } } return yajl_tok_error; } /* lex a string. input is the lexer, pointer to beginning of * json text, and start of string (offset). * a token is returned which has the following meanings: * yajl_tok_string: lex of string was successful. offset points to * terminating '"'. * yajl_tok_eof: end of text was encountered before we could complete * the lex. * yajl_tok_error: embedded in the string were unallowable chars. 
offset * points to the offending char */ #define STR_CHECK_EOF \ if (*offset >= jsonTextLen) { \ tok = yajl_tok_eof; \ goto finish_string_lex; \ } /** scan a string for interesting characters that might need further * review. return the number of chars that are uninteresting and can * be skipped. * (lth) hi world, any thoughts on how to make this routine faster? */ static size_t yajl_string_scan(const unsigned char * buf, size_t len, int utf8check) { unsigned char mask = IJC|NFP|(utf8check ? NUC : 0); size_t skip = 0; while (skip < len && !(charLookupTable[*buf] & mask)) { skip++; buf++; } return skip; } static yajl_tok yajl_lex_string(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t * offset) { yajl_tok tok = yajl_tok_error; int hasEscapes = 0; for (;;) { unsigned char curChar; /* now jump into a faster scanning routine to skip as much * of the buffers as possible */ { const unsigned char * p; size_t len; if ((lexer->bufInUse && yajl_buf_len(lexer->buf) && lexer->bufOff < yajl_buf_len(lexer->buf))) { p = ((const unsigned char *) yajl_buf_data(lexer->buf) + (lexer->bufOff)); len = yajl_buf_len(lexer->buf) - lexer->bufOff; lexer->bufOff += yajl_string_scan(p, len, lexer->validateUTF8); } else if (*offset < jsonTextLen) { p = jsonText + *offset; len = jsonTextLen - *offset; *offset += yajl_string_scan(p, len, lexer->validateUTF8); } } STR_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); /* quote terminates */ if (curChar == '"') { tok = yajl_tok_string; break; } /* backslash escapes a set of control chars, */ else if (curChar == '\\') { hasEscapes = 1; STR_CHECK_EOF; /* special case \u */ curChar = readChar(lexer, jsonText, offset); if (curChar == 'u') { unsigned int i = 0; for (i=0;i<4;i++) { STR_CHECK_EOF; curChar = readChar(lexer, jsonText, offset); if (!(charLookupTable[curChar] & VHC)) { /* back up to offending char */ unreadChar(lexer, offset); lexer->error = yajl_lex_string_invalid_hex_char; goto finish_string_lex; } } } else 
if (!(charLookupTable[curChar] & VEC)) { /* back up to offending char */ unreadChar(lexer, offset); lexer->error = yajl_lex_string_invalid_escaped_char; goto finish_string_lex; } } /* when not validating UTF8 it's a simple table lookup to determine * if the present character is invalid */ else if(charLookupTable[curChar] & IJC) { /* back up to offending char */ unreadChar(lexer, offset); lexer->error = yajl_lex_string_invalid_json_char; goto finish_string_lex; } /* when in validate UTF8 mode we need to do some extra work */ else if (lexer->validateUTF8) { yajl_tok t = yajl_lex_utf8_char(lexer, jsonText, jsonTextLen, offset, curChar); if (t == yajl_tok_eof) { tok = yajl_tok_eof; goto finish_string_lex; } else if (t == yajl_tok_error) { lexer->error = yajl_lex_string_invalid_utf8; goto finish_string_lex; } } /* accept it, and move on */ } finish_string_lex: /* tell our buddy, the parser, wether he needs to process this string * again */ if (hasEscapes && tok == yajl_tok_string) { tok = yajl_tok_string_with_escapes; } return tok; } #define RETURN_IF_EOF if (*offset >= jsonTextLen) return yajl_tok_eof; static yajl_tok yajl_lex_number(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t * offset) { /** XXX: numbers are the only entities in json that we must lex * _beyond_ in order to know that they are complete. There * is an ambiguous case for integers at EOF. 
*/ unsigned char c; yajl_tok tok = yajl_tok_integer; RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); /* optional leading minus */ if (c == '-') { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } /* a single zero, or a series of integers */ if (c == '0') { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } else if (c >= '1' && c <= '9') { do { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } while (c >= '0' && c <= '9'); } else { unreadChar(lexer, offset); lexer->error = yajl_lex_missing_integer_after_minus; return yajl_tok_error; } /* optional fraction (indicates this is floating point) */ if (c == '.') { int numRd = 0; RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); while (c >= '0' && c <= '9') { numRd++; RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } if (!numRd) { unreadChar(lexer, offset); lexer->error = yajl_lex_missing_integer_after_decimal; return yajl_tok_error; } tok = yajl_tok_double; } /* optional exponent (indicates this is floating point) */ if (c == 'e' || c == 'E') { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); /* optional sign */ if (c == '+' || c == '-') { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } if (c >= '0' && c <= '9') { do { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } while (c >= '0' && c <= '9'); } else { unreadChar(lexer, offset); lexer->error = yajl_lex_missing_integer_after_exponent; return yajl_tok_error; } tok = yajl_tok_double; } /* we always go "one too far" */ unreadChar(lexer, offset); return tok; } static yajl_tok yajl_lex_comment(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t * offset) { unsigned char c; yajl_tok tok = yajl_tok_comment; RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); /* either slash or star expected */ if (c == '/') { /* now we throw away until end of line */ do { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); } while (c != '\n'); } else if (c == '*') { /* now we throw away until end of comment */ 
for (;;) { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); if (c == '*') { RETURN_IF_EOF; c = readChar(lexer, jsonText, offset); if (c == '/') { break; } else { unreadChar(lexer, offset); } } } } else { lexer->error = yajl_lex_invalid_char; tok = yajl_tok_error; } return tok; } yajl_tok yajl_lex_lex(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t * offset, const unsigned char ** outBuf, size_t * outLen) { yajl_tok tok = yajl_tok_error; unsigned char c; size_t startOffset = *offset; *outBuf = NULL; *outLen = 0; for (;;) { assert(*offset <= jsonTextLen); if (*offset >= jsonTextLen) { tok = yajl_tok_eof; goto lexed; } c = readChar(lexer, jsonText, offset); switch (c) { case '{': tok = yajl_tok_left_bracket; goto lexed; case '}': tok = yajl_tok_right_bracket; goto lexed; case '[': tok = yajl_tok_left_brace; goto lexed; case ']': tok = yajl_tok_right_brace; goto lexed; case ',': tok = yajl_tok_comma; goto lexed; case ':': tok = yajl_tok_colon; goto lexed; case '\t': case '\n': case '\v': case '\f': case '\r': case ' ': startOffset++; break; case 't': { const char * want = "rue"; do { if (*offset >= jsonTextLen) { tok = yajl_tok_eof; goto lexed; } c = readChar(lexer, jsonText, offset); if (c != *want) { unreadChar(lexer, offset); lexer->error = yajl_lex_invalid_string; tok = yajl_tok_error; goto lexed; } } while (*(++want)); tok = yajl_tok_bool; goto lexed; } case 'f': { const char * want = "alse"; do { if (*offset >= jsonTextLen) { tok = yajl_tok_eof; goto lexed; } c = readChar(lexer, jsonText, offset); if (c != *want) { unreadChar(lexer, offset); lexer->error = yajl_lex_invalid_string; tok = yajl_tok_error; goto lexed; } } while (*(++want)); tok = yajl_tok_bool; goto lexed; } case 'n': { const char * want = "ull"; do { if (*offset >= jsonTextLen) { tok = yajl_tok_eof; goto lexed; } c = readChar(lexer, jsonText, offset); if (c != *want) { unreadChar(lexer, offset); lexer->error = yajl_lex_invalid_string; tok = yajl_tok_error; goto 
lexed; } } while (*(++want)); tok = yajl_tok_null; goto lexed; } case '"': { tok = yajl_lex_string(lexer, (const unsigned char *) jsonText, jsonTextLen, offset); goto lexed; } case '-': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': { /* integer parsing wants to start from the beginning */ unreadChar(lexer, offset); tok = yajl_lex_number(lexer, (const unsigned char *) jsonText, jsonTextLen, offset); goto lexed; } case '/': /* hey, look, a probable comment! If comments are disabled * it's an error. */ if (!lexer->allowComments) { unreadChar(lexer, offset); lexer->error = yajl_lex_unallowed_comment; tok = yajl_tok_error; goto lexed; } /* if comments are enabled, then we should try to lex * the thing. possible outcomes are * - successful lex (tok_comment, which means continue), * - malformed comment opening (slash not followed by * '*' or '/') (tok_error) * - eof hit. (tok_eof) */ tok = yajl_lex_comment(lexer, (const unsigned char *) jsonText, jsonTextLen, offset); if (tok == yajl_tok_comment) { /* "error" is silly, but that's the initial * state of tok. guilty until proven innocent. */ tok = yajl_tok_error; yajl_buf_clear(lexer->buf); lexer->bufInUse = 0; startOffset = *offset; break; } /* hit error or eof, bail */ goto lexed; default: lexer->error = yajl_lex_invalid_char; tok = yajl_tok_error; goto lexed; } } lexed: /* need to append to buffer if the buffer is in use or * if it's an EOF token */ if (tok == yajl_tok_eof || lexer->bufInUse) { if (!lexer->bufInUse) yajl_buf_clear(lexer->buf); lexer->bufInUse = 1; yajl_buf_append(lexer->buf, jsonText + startOffset, *offset - startOffset); lexer->bufOff = 0; if (tok != yajl_tok_eof) { *outBuf = yajl_buf_data(lexer->buf); *outLen = yajl_buf_len(lexer->buf); lexer->bufInUse = 0; } } else if (tok != yajl_tok_error) { *outBuf = jsonText + startOffset; *outLen = *offset - startOffset; } /* special case for strings. skip the quotes. 
*/ if (tok == yajl_tok_string || tok == yajl_tok_string_with_escapes) { assert(*outLen >= 2); (*outBuf)++; *outLen -= 2; } #ifdef YAJL_LEXER_DEBUG if (tok == yajl_tok_error) { printf("lexical error: %s\n", yajl_lex_error_to_string(yajl_lex_get_error(lexer))); } else if (tok == yajl_tok_eof) { printf("EOF hit\n"); } else { printf("lexed %s: '", tokToStr(tok)); fwrite(*outBuf, 1, *outLen, stdout); printf("'\n"); } #endif return tok; } const char * yajl_lex_error_to_string(yajl_lex_error error) { switch (error) { case yajl_lex_e_ok: return "ok, no error"; case yajl_lex_string_invalid_utf8: return "invalid bytes in UTF8 string."; case yajl_lex_string_invalid_escaped_char: return "inside a string, '\\' occurs before a character " "which it may not."; case yajl_lex_string_invalid_json_char: return "invalid character inside string."; case yajl_lex_string_invalid_hex_char: return "invalid (non-hex) character occurs after '\\u' inside " "string."; case yajl_lex_invalid_char: return "invalid char in json text."; case yajl_lex_invalid_string: return "invalid string in json text."; case yajl_lex_missing_integer_after_exponent: return "malformed number, a digit is required after the exponent."; case yajl_lex_missing_integer_after_decimal: return "malformed number, a digit is required after the " "decimal point."; case yajl_lex_missing_integer_after_minus: return "malformed number, a digit is required after the " "minus sign."; case yajl_lex_unallowed_comment: return "probable comment found in input text, comments are " "not enabled."; } return "unknown error code"; } /** allows access to more specific information about the lexical * error when yajl_lex_lex returns yajl_tok_error. 
*/ yajl_lex_error yajl_lex_get_error(yajl_lexer lexer) { if (lexer == NULL) return (yajl_lex_error) -1; return lexer->error; } size_t yajl_lex_current_line(yajl_lexer lexer) { return lexer->lineOff; } size_t yajl_lex_current_char(yajl_lexer lexer) { return lexer->charOff; } yajl_tok yajl_lex_peek(yajl_lexer lexer, const unsigned char * jsonText, size_t jsonTextLen, size_t offset) { const unsigned char * outBuf; size_t outLen; size_t bufLen = yajl_buf_len(lexer->buf); size_t bufOff = lexer->bufOff; unsigned int bufInUse = lexer->bufInUse; yajl_tok tok; tok = yajl_lex_lex(lexer, jsonText, jsonTextLen, &offset, &outBuf, &outLen); lexer->bufOff = bufOff; lexer->bufInUse = bufInUse; yajl_buf_truncate(lexer->buf, bufLen); return tok; } jsonlite/src/yajl/yajl_encode.h0000644000176200001440000000243012626133701016254 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ #ifndef __YAJL_ENCODE_H__ #define __YAJL_ENCODE_H__ #include "yajl_buf.h" #include "api/yajl_gen.h" void yajl_string_encode(const yajl_print_t printer, void * ctx, const unsigned char * str, size_t length, int escape_solidus); void yajl_string_decode(yajl_buf buf, const unsigned char * str, size_t length); int yajl_string_validate_utf8(const unsigned char * s, size_t len); #endif jsonlite/src/yajl/yajl_buf.c0000644000176200001440000000473212626133701015575 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ #include "yajl_buf.h" #include #include #include #define YAJL_BUF_INIT_SIZE 2048 struct yajl_buf_t { size_t len; size_t used; unsigned char * data; yajl_alloc_funcs * alloc; }; static void yajl_buf_ensure_available(yajl_buf buf, size_t want) { size_t need; assert(buf != NULL); /* first call */ if (buf->data == NULL) { buf->len = YAJL_BUF_INIT_SIZE; buf->data = (unsigned char *) YA_MALLOC(buf->alloc, buf->len); buf->data[0] = 0; } need = buf->len; while (want >= (need - buf->used)) need <<= 1; if (need != buf->len) { buf->data = (unsigned char *) YA_REALLOC(buf->alloc, buf->data, need); buf->len = need; } } yajl_buf yajl_buf_alloc(yajl_alloc_funcs * alloc) { yajl_buf b = YA_MALLOC(alloc, sizeof(struct yajl_buf_t)); memset((void *) b, 0, sizeof(struct yajl_buf_t)); b->alloc = alloc; return b; } void yajl_buf_free(yajl_buf buf) { assert(buf != NULL); if (buf->data) YA_FREE(buf->alloc, buf->data); YA_FREE(buf->alloc, buf); } void yajl_buf_append(yajl_buf buf, const void * data, size_t len) { yajl_buf_ensure_available(buf, len); if (len > 0) { assert(data != NULL); memcpy(buf->data + buf->used, data, len); buf->used += len; buf->data[buf->used] = 0; } } void yajl_buf_clear(yajl_buf buf) { buf->used = 0; if (buf->data) buf->data[buf->used] = 0; } const unsigned char * yajl_buf_data(yajl_buf buf) { return buf->data; } size_t yajl_buf_len(yajl_buf buf) { return buf->used; } void yajl_buf_truncate(yajl_buf buf, size_t len) { assert(len <= buf->used); buf->used = len; } jsonlite/src/yajl/yajl_bytestack.h0000644000176200001440000000451312626133701017014 0ustar liggesusers/* * Copyright (c) 2007-2014, Lloyd Hilaiel * * Permission to use, copy, modify, and/or distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. 
* * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /* * A header only implementation of a simple stack of bytes, used in YAJL * to maintain parse state. */ #ifndef __YAJL_BYTESTACK_H__ #define __YAJL_BYTESTACK_H__ #include "api/yajl_common.h" #define YAJL_BS_INC 128 typedef struct yajl_bytestack_t { unsigned char * stack; size_t size; size_t used; yajl_alloc_funcs * yaf; } yajl_bytestack; /* initialize a bytestack */ #define yajl_bs_init(obs, _yaf) { \ (obs).stack = NULL; \ (obs).size = 0; \ (obs).used = 0; \ (obs).yaf = (_yaf); \ } \ /* initialize a bytestack */ #define yajl_bs_free(obs) \ if ((obs).stack) (obs).yaf->free((obs).yaf->ctx, (obs).stack); #define yajl_bs_current(obs) \ (assert((obs).used > 0), (obs).stack[(obs).used - 1]) #define yajl_bs_push(obs, byte) { \ if (((obs).size - (obs).used) == 0) { \ (obs).size += YAJL_BS_INC; \ (obs).stack = (obs).yaf->realloc((obs).yaf->ctx,\ (void *) (obs).stack, (obs).size);\ } \ (obs).stack[((obs).used)++] = (byte); \ } /* removes the top item of the stack, returns nothing */ #define yajl_bs_pop(obs) { ((obs).used)--; } #define yajl_bs_set(obs, byte) \ (obs).stack[((obs).used) - 1] = (byte); #endif jsonlite/src/validate.c0000644000176200001440000000241012626133701014623 0ustar liggesusers#include #include #include SEXP R_validate(SEXP x) { /* get data from R */ const char* json = translateCharUTF8(asChar(x)); /* test for BOM */ if(json[0] == '\xEF' && json[1] == '\xBB' && json[2] == '\xBF'){ SEXP output = duplicate(ScalarLogical(0)); setAttrib(output, install("err"), 
mkString("JSON string contains UTF8 byte-order-mark.")); return(output); } /* allocate a parser */ yajl_handle hand = yajl_alloc(NULL, NULL, NULL); /* parser options */ //yajl_config(hand, yajl_dont_validate_strings, 1); /* go parse */ const size_t rd = strlen(json); yajl_status stat = yajl_parse(hand, (const unsigned char*) json, rd); if(stat == yajl_status_ok) { stat = yajl_complete_parse(hand); } SEXP output = PROTECT(duplicate(ScalarLogical(!stat))); //error message if (stat != yajl_status_ok) { unsigned char* str = yajl_get_error(hand, 1, (const unsigned char*) json, rd); SEXP errstr = mkString((const char *) str); yajl_free_error(hand, str); setAttrib(output, install("err"), errstr); } /* return boolean vec (0 means no errors, means is valid) */ yajl_free(hand); UNPROTECT(1); return output; } jsonlite/src/row_collapse.c0000644000176200001440000000306112626133701015526 0ustar liggesusers#include #include #include SEXP C_collapse_object(SEXP x, SEXP y); SEXP C_collapse_array(SEXP x); SEXP C_collapse_array_pretty_inner(SEXP x, SEXP indent); SEXP C_collapse_object_pretty(SEXP x, SEXP y, SEXP indent); SEXP C_row_collapse_object(SEXP names, SEXP m, SEXP indent){ //get matrix dimensions int *dims = INTEGER(getAttrib(m, install("dim"))); int x = dims[0]; int y = dims[1]; //allocate the output vector SEXP out = PROTECT(allocVector(STRSXP, x)); SEXP vec = PROTECT(allocVector(STRSXP, y)); for(int i = 0; i < x; i++) { for(int j = 0; j < y; j++) { SET_STRING_ELT(vec, j, STRING_ELT(m, j*x + i)); } if(asInteger(indent) == NA_INTEGER){ SET_STRING_ELT(out, i, asChar(C_collapse_object(names, vec))); } else { SET_STRING_ELT(out, i, asChar(C_collapse_object_pretty(names, vec, indent))); } } UNPROTECT(2); return out; } SEXP C_row_collapse_array(SEXP m, SEXP indent){ //get matrix dimensions int *dims = INTEGER(getAttrib(m, install("dim"))); int x = dims[0]; int y = dims[1]; //allocate the output vector SEXP out = PROTECT(allocVector(STRSXP, x)); SEXP vec = 
PROTECT(allocVector(STRSXP, y)); for(int i = 0; i < x; i++) { for(int j = 0; j < y; j++) { SET_STRING_ELT(vec, j, STRING_ELT(m, j*x + i)); } if(asInteger(indent) == NA_INTEGER){ SET_STRING_ELT(out, i, asChar(C_collapse_array(vec))); } else { SET_STRING_ELT(out, i, asChar(C_collapse_array_pretty_inner(vec, indent))); } } UNPROTECT(2); return out; } jsonlite/src/collapse_object.c0000644000176200001440000000245212626133701016170 0ustar liggesusers#include #include #include SEXP C_collapse_object(SEXP x, SEXP y) { if (!isString(x) || !isString(y)) error("x and y must character vectors."); int len = length(x); if (len != length(y)) error("x and y must same length."); size_t nchar_total = 0; for (int i=0; i #include #include // other interesting references on num to string convesion // http://www.jb.man.ac.uk/~slowe/cpp/itoa.html // and http://www.ddj.com/dept/cpp/184401596?pgno=6 // Version 19-Nov-2007 // Fixed round-to-even rules to match printf // thanks to Johannes Otepka /** * Powers of 10 * 10^0 to 10^9 */ static const double poww10[] = {1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000}; static void strreverse(char* begin, char* end) { char aux; while (end > begin) aux = *end, *end-- = *begin, *begin++ = aux; } void modp_itoa10(int32_t value, char* str) { char* wstr=str; // Take care of sign unsigned int uvalue = (value < 0) ? -value : value; // Conversion. Number is reversed. do *wstr++ = (char)(48 + (uvalue % 10)); while(uvalue /= 10); if (value < 0) *wstr++ = '-'; *wstr='\0'; // Reverse string strreverse(str,wstr-1); } void modp_uitoa10(uint32_t value, char* str) { char* wstr=str; // Conversion. Number is reversed. do *wstr++ = (char)(48 + (value % 10)); while (value /= 10); *wstr='\0'; // Reverse string strreverse(str, wstr-1); } void modp_litoa10(int64_t value, char* str) { char* wstr=str; unsigned long uvalue = (value < 0) ? -value : value; // Conversion. Number is reversed. 
do *wstr++ = (char)(48 + (uvalue % 10)); while(uvalue /= 10); if (value < 0) *wstr++ = '-'; *wstr='\0'; // Reverse string strreverse(str,wstr-1); } void modp_ulitoa10(uint64_t value, char* str) { char* wstr=str; // Conversion. Number is reversed. do *wstr++ = (char)(48 + (value % 10)); while (value /= 10); *wstr='\0'; // Reverse string strreverse(str, wstr-1); } void modp_dtoa(double value, char* str, int prec) { /* Hacky test for NaN * under -fast-math this won't work, but then you also won't * have correct nan values anyways. The alternative is * to link with libmath (bad) or hack IEEE double bits (bad) */ if (! (value == value)) { str[0] = 'n'; str[1] = 'a'; str[2] = 'n'; str[3] = '\0'; return; } /* if input is larger than thres_max, revert to exponential */ const double thres_max = (double)(0x7FFFFFFF); double diff = 0.0; char* wstr = str; if (prec < 0) { prec = 0; } else if (prec > 9) { /* precision of >= 10 can lead to overflow errors */ prec = 9; } /* we'll work in positive values and deal with the negative sign issue later */ int neg = 0; if (value < 0) { neg = 1; value = -value; } int whole = (int) value; double tmp = (value - whole) * poww10[prec]; uint32_t frac = (uint32_t)(tmp); diff = tmp - frac; if (diff > 0.5) { ++frac; /* handle rollover, e.g. case 0.99 with prec 1 is 1.0 */ if (frac >= poww10[prec]) { frac = 0; ++whole; } } else if (diff == 0.5 && ((frac == 0) || (frac & 1))) { /* if halfway, round up if odd, OR if last digit is 0. That last part is strange */ ++frac; } /* for very large numbers switch back to native sprintf for exponentials. anyone want to write code to replace this? */ /* normal printf behavior is to print EVERY whole number digit which can be 100s of characters overflowing your buffers == bad */ if (value > thres_max) { sprintf(str, "%e", neg ? -value : value); return; } if (prec == 0) { diff = value - whole; if (diff > 0.5) { /* greater than 0.5, round up, e.g. 
1.6 -> 2 */ ++whole; } else if (diff == 0.5 && (whole & 1)) { /* exactly 0.5 and ODD, then round up */ /* 1.5 -> 2, but 2.5 -> 2 */ ++whole; } } else { int count = prec; // now do fractional part, as an unsigned number do { --count; *wstr++ = (char)(48 + (frac % 10)); } while (frac /= 10); // add extra 0s while (count-- > 0) *wstr++ = '0'; // add decimal *wstr++ = '.'; } // do whole part // Take care of sign // Conversion. Number is reversed. do *wstr++ = (char)(48 + (whole % 10)); while (whole /= 10); if (neg) { *wstr++ = '-'; } *wstr='\0'; strreverse(str, wstr-1); } // This is near identical to modp_dtoa above // The differnce is noted below void modp_dtoa2(double value, char* str, int prec) { /* Hacky test for NaN * under -fast-math this won't work, but then you also won't * have correct nan values anyways. The alternative is * to link with libmath (bad) or hack IEEE double bits (bad) */ if (! (value == value)) { str[0] = 'n'; str[1] = 'a'; str[2] = 'n'; str[3] = '\0'; return; } /* if input is larger than thres_max, revert to exponential */ const double thres_max = (double)(0x7FFFFFFF); int count; double diff = 0.0; char* wstr = str; if (prec < 0) { prec = 0; } else if (prec > 9) { /* precision of >= 10 can lead to overflow errors */ prec = 9; } /* we'll work in positive values and deal with the negative sign issue later */ int neg = 0; if (value < 0) { neg = 1; value = -value; } int whole = (int) value; double tmp = (value - whole) * poww10[prec]; uint32_t frac = (uint32_t)(tmp); diff = tmp - frac; if (diff > 0.5) { ++frac; /* handle rollover, e.g. case 0.99 with prec 1 is 1.0 */ if (frac >= poww10[prec]) { frac = 0; ++whole; } } else if (diff == 0.5 && ((frac == 0) || (frac & 1))) { /* if halfway, round up if odd, OR if last digit is 0. That last part is strange */ ++frac; } /* for very large numbers switch back to native sprintf for exponentials. anyone want to write code to replace this? 
*/ /* normal printf behavior is to print EVERY whole number digit which can be 100s of characters overflowing your buffers == bad */ if (value > thres_max) { sprintf(str, "%e", neg ? -value : value); return; } if (prec == 0) { diff = value - whole; if (diff > 0.5) { /* greater than 0.5, round up, e.g. 1.6 -> 2 */ ++whole; } else if (diff == 0.5 && (whole & 1)) { /* exactly 0.5 and ODD, then round up */ /* 1.5 -> 2, but 2.5 -> 2 */ ++whole; } //vvvvvvvvvvvvvvvvvvv Diff from modp_dto2 } else if (frac) { count = prec; // now do fractional part, as an unsigned number // we know it is not 0 but we can have leading zeros, these // should be removed while (!(frac % 10)) { --count; frac /= 10; } //^^^^^^^^^^^^^^^^^^^ Diff from modp_dto2 // now do fractional part, as an unsigned number do { --count; *wstr++ = (char)(48 + (frac % 10)); } while (frac /= 10); // add extra 0s while (count-- > 0) *wstr++ = '0'; // add decimal *wstr++ = '.'; } // do whole part // Take care of sign // Conversion. Number is reversed. 
do *wstr++ = (char)(48 + (whole % 10)); while (whole /= 10); if (neg) { *wstr++ = '-'; } *wstr='\0'; strreverse(str, wstr-1); } jsonlite/src/push_parser.c0000644000176200001440000000307512626133701015375 0ustar liggesusers#include #include #include #include /* finalizer */ yajl_handle push_parser; void reset_parser(){ if(push_parser != NULL){ yajl_free(push_parser); push_parser = NULL; } } SEXP R_feed_push_parser(SEXP x, SEXP reset){ /* raw pointer */ const unsigned char *json = RAW(x); int len = LENGTH(x); /* init new push parser */ if(asLogical(reset)) { reset_parser(); push_parser = push_parser_new(); /* ignore BOM as suggested by RFC */ if(len > 3 && json[0] == 239 && json[1] == 187 && json[2] == 191){ warningcall(R_NilValue, "JSON string contains (illegal) UTF8 byte-order-mark!"); json += 3; len -= 3; } } /* check for errors */ if (yajl_parse(push_parser, json, len) != yajl_status_ok) { unsigned char* errstr = yajl_get_error(push_parser, 1, RAW(x), length(x)); SEXP tmp = mkChar((const char*) errstr); yajl_free_error(push_parser, errstr); reset_parser(); error(CHAR(tmp)); } /* return OK */ return ScalarLogical(1); } SEXP R_finalize_push_parser(SEXP bigint_as_char){ /* check for errors */ if (yajl_complete_parse(push_parser) != yajl_status_ok) { unsigned char* errstr = yajl_get_error(push_parser, 1, NULL, 0); SEXP tmp = mkChar((const char*) errstr); yajl_free_error(push_parser, errstr); reset_parser(); error(CHAR(tmp)); } /* get value */ yajl_val tree = push_parser_get(push_parser); SEXP out = ParseValue(tree, asLogical(bigint_as_char)); yajl_tree_free(tree); reset_parser(); return out; } jsonlite/src/is_recordlist.c0000644000176200001440000000173712626133701015712 0ustar liggesusers#include #include #include #include // .Call("C_is_namedlist", PACKAGE = "jsonlite", 123) bool is_namedlist(SEXP x) { if(TYPEOF(x) == VECSXP && getAttrib(x, R_NamesSymbol) != R_NilValue){ return true; } return false; } bool is_unnamedlist(SEXP x) { if(TYPEOF(x) == VECSXP && 
getAttrib(x, R_NamesSymbol) == R_NilValue){ return true; } return false; } bool is_namedlist_or_null(SEXP x){ return (is_namedlist(x) || (x == R_NilValue)); } bool is_recordlist(SEXP x){ bool at_least_one_object = false; if(!is_unnamedlist(x)){ return false; } int len = length(x); if(len < 1){ return false; } for (int i=0; i #include #include SEXP C_collapse_array(SEXP x) { if (!isString(x)) error("x must be a character vector."); int len = length(x); size_t nchar_total = 0; for (int i=0; i #include #include #include SEXP C_is_scalarlist(SEXP x) { bool is_scalarlist = true; if (TYPEOF(x) != VECSXP){ is_scalarlist = false; } else { SEXP el; int len = length(x); for (int i=0; i #include #include #include static int s_streamReformat = 0; #define GEN_AND_RETURN(func){\ yajl_gen_status __stat = func;\ if (__stat == yajl_gen_generation_complete && s_streamReformat) {\ yajl_gen_reset(g, "\n");\ __stat = func;\ }\ return __stat == yajl_gen_status_ok;\ } static int reformat_null(void * ctx) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_null(g)); } static int reformat_boolean(void * ctx, int boolean) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_bool(g, boolean)); } static int reformat_number(void * ctx, const char * s, size_t l) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_number(g, s, l)); } static int reformat_string(void * ctx, const unsigned char * stringVal, size_t stringLen) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_string(g, stringVal, stringLen)); } static int reformat_map_key(void * ctx, const unsigned char * stringVal, size_t stringLen) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_string(g, stringVal, stringLen)); } static int reformat_start_map(void * ctx) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_map_open(g)); } static int reformat_end_map(void * ctx) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_map_close(g)); } static int reformat_start_array(void * ctx) { yajl_gen g = (yajl_gen) ctx; 
GEN_AND_RETURN(yajl_gen_array_open(g)); } static int reformat_end_array(void * ctx) { yajl_gen g = (yajl_gen) ctx; GEN_AND_RETURN(yajl_gen_array_close(g)); } static yajl_callbacks callbacks = { reformat_null, reformat_boolean, NULL, NULL, reformat_number, reformat_string, reformat_start_map, reformat_map_key, reformat_end_map, reformat_start_array, reformat_end_array }; SEXP R_reformat(SEXP x, SEXP pretty, SEXP indent_string) { yajl_status stat; yajl_handle hand; yajl_gen g; SEXP output; /* init generator */ g = yajl_gen_alloc(NULL); yajl_gen_config(g, yajl_gen_beautify, asInteger(pretty)); yajl_gen_config(g, yajl_gen_indent_string, translateCharUTF8(asChar(indent_string))); yajl_gen_config(g, yajl_gen_validate_utf8, 0); /* init parser */ hand = yajl_alloc(&callbacks, NULL, (void *) g); /* get data from R */ const char* json = translateCharUTF8(asChar(x)); /* ignore BOM */ if(json[0] == '\xEF' && json[1] == '\xBB' && json[2] == '\xBF'){ json = json + 3; } /* Get length (after removing bom) */ const size_t rd = strlen(json); /* parse */ stat = yajl_parse(hand, (const unsigned char*) json, rd); if(stat == yajl_status_ok) { stat = yajl_complete_parse(hand); } //error message if (stat != yajl_status_ok) { unsigned char* str = yajl_get_error(hand, 1, (const unsigned char*) json, rd); output = mkString((const char*) str); yajl_free_error(hand, str); } else { //create R object const unsigned char* buf; size_t len; yajl_gen_get_buf(g, &buf, &len); //force as UTF8 string output = PROTECT(allocVector(STRSXP, 1)); SET_STRING_ELT(output, 0, mkCharCE((const char*) buf, CE_UTF8)); setAttrib(output, R_ClassSymbol, mkString("json")); UNPROTECT(1); } /* clean up */ yajl_gen_clear(g); yajl_gen_free(g); yajl_free(hand); /* return boolean vec (0 means no errors, means is valid) */ SEXP vec = PROTECT(allocVector(VECSXP, 2)); SET_VECTOR_ELT(vec, 0, ScalarInteger(stat)); SET_VECTOR_ELT(vec, 1, output); UNPROTECT(1); return vec; } 
jsonlite/src/modp_numtoa.h0000644000176200001440000000545612626133701015376 0ustar liggesusers/* -*- mode: c++; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 4 -*- */ /* vi: set expandtab shiftwidth=4 tabstop=4: */ /** * \file * *

 * Copyright © 2007, Nick Galbreath -- nickg [at] modp [dot] com
 * All rights reserved.
 * http://code.google.com/p/stringencoders/
 * Released under the bsd license.
 * 
* * This defines signed/unsigned integer, and 'double' to char buffer * converters. The standard way of doing this is with "sprintf", however * these functions are * * guarenteed maximum size output * * 5-20x faster! * * core-dump safe * * */ #ifndef COM_MODP_STRINGENCODERS_NUMTOA_H #define COM_MODP_STRINGENCODERS_NUMTOA_H #ifdef __cplusplus #define BEGIN_C extern "C" { #define END_C } #else #define BEGIN_C #define END_C #endif BEGIN_C #include /** \brief convert an signed integer to char buffer * * \param[in] value * \param[out] buf the output buffer. Should be 16 chars or more. */ void modp_itoa10(int32_t value, char* buf); /** \brief convert an unsigned integer to char buffer * * \param[in] value * \param[out] buf The output buffer, should be 16 chars or more. */ void modp_uitoa10(uint32_t value, char* buf); /** \brief convert an signed long integer to char buffer * * \param[in] value * \param[out] buf the output buffer. Should be 24 chars or more. */ void modp_litoa10(int64_t value, char* buf); /** \brief convert an unsigned long integer to char buffer * * \param[in] value * \param[out] buf The output buffer, should be 24 chars or more. */ void modp_ulitoa10(uint64_t value, char* buf); /** \brief convert a floating point number to char buffer with * fixed-precision format * * This is similar to "%.[0-9]f" in the printf style. It will include * trailing zeros * * If the input value is greater than 1<<31, then the output format * will be switched exponential format. * * \param[in] value * \param[out] buf The allocated output buffer. Should be 32 chars or more. * \param[in] precision Number of digits to the right of the decimal point. * Can only be 0-9. */ void modp_dtoa(double value, char* buf, int precision); /** \brief convert a floating point number to char buffer with a * variable-precision format, and no trailing zeros * * This is similar to "%.[0-9]f" in the printf style, except it will * NOT include trailing zeros after the decimal point. 
This type * of format oddly does not exists with printf. * * If the input value is greater than 1<<31, then the output format * will be switched exponential format. * * \param[in] value * \param[out] buf The allocated output buffer. Should be 32 chars or more. * \param[in] precision Number of digits to the right of the decimal point. * Can only be 0-9. */ void modp_dtoa2(double value, char* buf, int precision); END_C #endif jsonlite/src/parse.c0000644000176200001440000000633412626133701014155 0ustar liggesusers/* This function uses the YAJL tree parser to parse the entire document before converting it to an R list. It might be faster to use the YAJL callback mechanism instead to construct the R list immediately while parsing the JSON. But that looks very complicated. */ #include #include SEXP ParseObject(yajl_val node, int bigint); SEXP ParseArray(yajl_val node, int bigint); SEXP ParseValue(yajl_val node, int bigint); SEXP R_parse(SEXP x, SEXP bigint_as_char) { /* get data from R */ const char* json = translateCharUTF8(asChar(x)); const int bigint = asLogical(bigint_as_char); /* ignore BOM as suggested by RFC */ if(json[0] == '\xEF' && json[1] == '\xBB' && json[2] == '\xBF'){ warningcall(R_NilValue, "JSON string contains (illegal) UTF8 byte-order-mark!"); json = json + 3; } /* parse json */ char errbuf[1024]; yajl_val node = yajl_tree_parse(json, errbuf, sizeof(errbuf)); /* parser error */ if (!node) { Rf_errorcall(R_NilValue, errbuf); } SEXP out = ParseValue(node, bigint); yajl_tree_free(node); return(out); } SEXP ParseValue(yajl_val node, int bigint){ if(YAJL_IS_NULL(node)){ return R_NilValue; } if(YAJL_IS_STRING(node)){ SEXP tmp = PROTECT(allocVector(STRSXP, 1)); SET_STRING_ELT(tmp, 0, mkCharCE(YAJL_GET_STRING(node), CE_UTF8)); UNPROTECT(1); return tmp; } if(YAJL_IS_INTEGER(node)){ long long int val = YAJL_GET_INTEGER(node); /* 2^53 is highest int stored as double without loss */ if(bigint && (val > 9007199254740992 || val < -9007199254740992)){ char buf[32]; 
#ifdef _WIN32 snprintf(buf, 32, "%I64d", val); #else snprintf(buf, 32, "%lld", val); #endif return mkString(buf); /* see .Machine$integer.max in R */ } else if(val > 2147483647 || val < -2147483647){ return ScalarReal(val); } else { return ScalarInteger(val); } } if(YAJL_IS_DOUBLE(node)){ return(ScalarReal(YAJL_GET_DOUBLE(node))); } if(YAJL_IS_NUMBER(node)){ /* A number that is not int or double (very rare) */ /* This seems to correctly round to Inf/0/-Inf */ return(ScalarReal(YAJL_GET_DOUBLE(node))); } if(YAJL_IS_TRUE(node)){ return(ScalarLogical(1)); } if(YAJL_IS_FALSE(node)){ return(ScalarLogical(0)); } if(YAJL_IS_OBJECT(node)){ return(ParseObject(node, bigint)); } if(YAJL_IS_ARRAY(node)){ return(ParseArray(node, bigint)); } error("Invalid YAJL node type."); } SEXP ParseObject(yajl_val node, int bigint){ int len = YAJL_GET_OBJECT(node)->len; SEXP keys = PROTECT(allocVector(STRSXP, len)); SEXP vec = PROTECT(allocVector(VECSXP, len)); for (int i = 0; i < len; ++i) { SET_STRING_ELT(keys, i, mkCharCE(YAJL_GET_OBJECT(node)->keys[i], CE_UTF8)); SET_VECTOR_ELT(vec, i, ParseValue(YAJL_GET_OBJECT(node)->values[i], bigint)); } setAttrib(vec, R_NamesSymbol, keys); UNPROTECT(2); return vec; } SEXP ParseArray(yajl_val node, int bigint){ int len = YAJL_GET_ARRAY(node)->len; SEXP vec = PROTECT(allocVector(VECSXP, len)); for (int i = 0; i < len; ++i) { SET_VECTOR_ELT(vec, i, ParseValue(YAJL_GET_ARRAY(node)->values[i], bigint)); } UNPROTECT(1); return vec; } jsonlite/NAMESPACE0000644000176200001440000000177112626136411013330 0ustar liggesusers# Generated by roxygen2 (4.1.1): do not edit by hand S3method(print,json) S3method(print,scalar) export(flatten) export(fromJSON) export(minify) export(prettify) export(rbind.pages) export(serializeJSON) export(stream_in) export(stream_out) export(toJSON) export(unbox) export(unserializeJSON) export(validate) import(methods) useDynLib(jsonlite,C_collapse_array) useDynLib(jsonlite,C_collapse_array_pretty_inner) 
useDynLib(jsonlite,C_collapse_array_pretty_outer) useDynLib(jsonlite,C_collapse_object) useDynLib(jsonlite,C_collapse_object_pretty) useDynLib(jsonlite,C_escape_chars) useDynLib(jsonlite,C_is_recordlist) useDynLib(jsonlite,C_is_scalarlist) useDynLib(jsonlite,C_null_to_na) useDynLib(jsonlite,C_row_collapse_array) useDynLib(jsonlite,C_row_collapse_object) useDynLib(jsonlite,R_feed_push_parser) useDynLib(jsonlite,R_finalize_push_parser) useDynLib(jsonlite,R_integer64_to_char) useDynLib(jsonlite,R_num_to_char) useDynLib(jsonlite,R_parse) useDynLib(jsonlite,R_reformat) useDynLib(jsonlite,R_validate) jsonlite/NEWS0000644000176200001440000001131712626136653012615 0ustar liggesusers0.9.19 - Remove the View() workaround because it causes issues in RStudio and because the bug in utils::View has been fixed in R-patched. 0.9.18 - Removed support for the archived int64 package. Use bit64 instead. - The stream_in function now skips over blank lines - Remove workaround for View() in rstudio - fromJSON now sets an 'Accept' request header for URLs. 0.9.17 - Fix for 1 dimensional array objects - Fix for unnamed data frame - Fix for duplicate _row fields - The 'unbox' function now accepts classed scalars such as dates - Ignore BOM with warning in push parser 0.9.16 - Performance optimizations in asJSON.data.frame by avoiding apply() - Prettifying is now done by default in R (thanks to Yihui Xie) - Add json_verbatim option to insert verbatim json - Improve verbose output of streaming functions 0.9.15 - Replaced RCurl dependency with the new curl package. - Added push parser for more efficient parsing from connection interfaces (e.g. files or urls). - The toMongo function has been removed. Use stream_out instead. - The `View` function will not be exposed in recent versions of rstudio that supported nested data natively. - Add hidden bigint_as_char argument in fromJSON - Fixed a memory leak in fromJSON. - I() forces boxing of a scalar when 'auto_unbox = TRUE' for RJSONIO compatibility. 
- toJSON now supports 'keep_vec_names=TRUE' will mimic RJSONIO legacy behavior. - toJSON now supports 'time_format' argument to customize formatting of POSIXt strings. 0.9.14 - Change license to MIT (for GPL2 compatibility). - Add support for digits = I(n) to use significant precision. - When 'pretty' in toJSON is numeric, it specifies the number of spaces to indent. - Bug fix in validate() related to ScalarLogical - Add support for dataframe = "values" - Byte-order-marks are now ignored as suggested by rfc7159 - Add support for integer64 class (bit64 package) 0.9.13 - Ported some number formatting to C - Fix when http server gives no content-type header - Prevent auto unboxing within a matrix - Assume UTF8 but fall back on native encoding for files and urls in fromJSON - Fix for 32 bit windows XP. 0.9.12 - New JSON parser based on yajl. Some changes: * smaller and faster than libjson * better error messages * automatic validation * native \uXXXX unicode parsing * integer parsing * uses spaces intead of tabs for prettify - Other functions ported to C: collapse_object, null_to_na, is_recordlist, is_scalarlist - Expose 'flatten' function - Row names are encoded as "_row" instead of "$row" - Start with streaming functions - Some internal changes to support BSON/MongoDB 0.9.11 - Added toJSON null argument - Fix bug in prettify object name escaping - Use C code for escaping and collapsing vectors - Several performance enhancements in fromJSON/simplify code - The auto_unbox argument is ignored for dataframes when dataframe="column" 0.9.10 - Add support for escaped (hexadecimal) unicode characters in fromJSON(unicode = TRUE) - Exporting a wrapper for View() that flattens nested data frames - Add print method for scalar (unbox) 0.9.9 - Fix bug where 'flatten' argument wasn't passed down - Make asJSON automatically unname any named lists within a data frame - fromJSON(url) now sets Accept header - Increase default to digits=4 in toJSON 0.9.8 - Remove whitespace from default 
output of toJSON - Split vignette in 3 documents - Add support for simplifyMatrix within a data frame - Add class "json" output of toJSON, simplify, minify - Add print method for json - Cleaned up unit tests - Workaround for SSL handshake issues in https downloads - Raise warnings for SSL or Content-Type abnormalities 0.9.7 - formally expose 'force' argument in toJSON - formally expose 'flatten' argument in fromJSON - fix bug in simplifyDataframe - fix in rlibjson code that converted empty array to AsIs object - auto coerse mongo type dates in fromJSON 0.9.6 - toJSON gains argument auto_unbox - Minor fixes 0.9.5 - Never raise error in toJSON when force=true - Fix typo in line 2 of JSONDefs/GNU_C.h (GUN -> GNU) - Run unit tests during R CMD check - Update vignette 0.9.4 - Added minify function - Added unbox function - Fixed bug where toJSON 'force' argument wasn't passed down - Removed -DJSON_VALIDATE -DJSON_STREAM=1 from Makevars.in (already in JSONoptions.h) 0.9.3 - Changes by Prof Ripley for Solaris/CRAN 0.9.2 - Update libjson to 7.6.1 (fixes bug in the validator) - Generalized toJSON method 'matrix' to 'array' to work for higher dimensions. - Add option to encode matrix either row-major or column-major. - Set default SSL version to 3 for fromJSON("https://...") 0.9.1 - Major performance improvements for toJSON.data.frame 0.9.0 - Initial release jsonlite/R/0000755000176200001440000000000012626136263012311 5ustar liggesusersjsonlite/R/asJSON.POSIXt.R0000644000176200001440000000176112540777273014632 0ustar liggesuserssetMethod("asJSON", "POSIXt", function(x, POSIXt = c("string", "ISO8601", "epoch", "mongo"), UTC = FALSE, digits, time_format = NULL, ...) 
{ # note: UTC argument doesn't seem to be working consistently maybe use ?format # instead of ?as.character # Validate POSIXt <- match.arg(POSIXt) # Encode based on a schema if (POSIXt == "mongo") { if (is(x, "POSIXlt")) { x <- as.POSIXct(x) } return(asJSON(data.frame("$date" = floor(unclass(x) * 1000), check.names = FALSE), digits = 0, ...)) } # Epoch millis if (POSIXt == "epoch") { return(asJSON(floor(unclass(as.POSIXct(x)) * 1000), digits = digits, ...)) } # Strings if(is.null(time_format)){ time_format <- if(POSIXt == "string"){ "" } else if(isTRUE(UTC)){ "%Y-%m-%dT%H:%M:%SZ" } else { "%Y-%m-%dT%H:%M:%S" } } if (isTRUE(UTC)) { asJSON(as.character(x, format = time_format, tz = "UTC"), ...) } else { asJSON(as.character(x, format = time_format), ...) } }) jsonlite/R/asJSON.logical.R0000644000176200001440000000177112540777273015157 0ustar liggesuserssetMethod("asJSON", "logical", function(x, collapse = TRUE, na = c("null", "string", "NA"), auto_unbox = FALSE, keep_vec_names = FALSE, indent = NA_integer_, ...) { # shiny legacy exception if(isTRUE(keep_vec_names) && length(names(x))){ warn_keep_vec_names() return(asJSON(as.list(x), collapse = collapse, na = na, auto_unbox = TRUE, ...)) } # validate arg na <- match.arg(na) # json true/false tmp <- ifelse(x, "true", "false") # replace missing values, unless na="NA" if(!identical(na, "NA")){ # logical values can have NA (but not Inf/NaN). Default is to encode as null. if (any(missings <- which(is.na(x)))) { tmp[missings] <- ifelse(identical(na, "string"), "\"NA\"", "null") } } #this is needed when !length(tmp) or all(is.na(tmp)) if(!is.character(tmp)){ tmp <- as.character(tmp); } if(isTRUE(auto_unbox) && length(tmp) == 1){ return(tmp); } # collapse it if(collapse) { collapse(tmp, indent = indent) } else { tmp } }) jsonlite/R/stop.R0000644000176200001440000000011112540777273013421 0ustar liggesusersstop <- function(..., call. = FALSE){ base::stop(..., call. 
= FALSE) } jsonlite/R/asJSON.array.R0000644000176200001440000000205112562610405014636 0ustar liggesuserssetMethod("asJSON", "array", function(x, collapse = TRUE, na = NULL, oldna = NULL, matrix = c("rowmajor", "columnmajor"), auto_unbox = FALSE, keep_vec_names = FALSE, indent = NA_integer_, ...) { #validate matrix <- match.arg(matrix); # reset na arg when called from data frame if(identical(na, "NA")){ na <- oldna; } # 1D arrays are vectors if(length(dim(x)) < 2){ return(asJSON(c(x), matrix = matrix, na = na, indent = indent + 2L, ...)) } # if collapse == FALSE, then this matrix is nested inside a data frame, # and therefore row major is required to match dimensions # dont pass auto_unbox (never unbox within matrix) margin <- ifelse(identical(matrix, "columnmajor") && isTRUE(collapse), length(dim(x)), 1); tmp <- apply(x, margin, asJSON, matrix = matrix, na = na, indent = indent + 2L, ...) # collapse it if (collapse) { collapse(tmp, inner = FALSE, indent = indent) } else { tmp } }) # Some objects have class Matrix but not class Array setMethod("asJSON", "matrix", getMethod("asJSON", "array")) jsonlite/R/fromJSON.R0000644000176200001440000001464412626136406014101 0ustar liggesusers#' These functions are used to convert between JSON data and \R{} objects. The \code{\link{toJSON}} and \code{\link{fromJSON}} #' functions use a class based mapping, which follows conventions outlined in this paper: \url{http://arxiv.org/abs/1403.2805} (also available as vignette). #' #' The \code{\link{toJSON}} and \code{\link{fromJSON}} functions are drop-in replacements for the identically named functions #' in packages \code{rjson} and \code{RJSONIO}. Our implementation uses an alternative, somewhat more consistent mapping #' between \R{} objects and JSON strings. #' #' The \code{\link{serializeJSON}} and \code{\link{unserializeJSON}} functions in this package use an #' alternative system to convert between \R{} objects and JSON, which supports more classes but is much more verbose. 
#' #' A JSON string is always unicode, using \code{UTF-8} by default, hence there is usually no need to escape any characters. #' However, the JSON format does support escaping of unicode characters, which are encoded using a backslash followed by #' a lower case \code{"u"} and 4 hex characters, for example: \code{"Z\\u00FCrich"}. The \code{fromJSON} function #' will parse such escape sequences but it is usually preferable to encode unicode characters in JSON using native #' \code{UTF-8} rather than escape sequences. # #' @rdname fromJSON #' @title Convert \R{} objects to/from JSON #' @name toJSON, fromJSON #' @aliases fromJSON toJSON jsonlite #' @export fromJSON toJSON #' @param txt a JSON string, URL or file #' @param simplifyVector coerce JSON arrays containing only primitives into an atomic vector #' @param simplifyDataFrame coerce JSON arrays containing only records (JSON objects) into a data frame #' @param simplifyMatrix coerce JSON arrays containing vectors of equal mode and dimension into matrix or array #' @param flatten automatically \code{\link{flatten}} nested data frames into a single non-nested data frame #' @param x the object to be encoded #' @param dataframe how to encode data.frame objects: must be one of 'rows', 'columns' or 'values' #' @param matrix how to encode matrices and higher dimensional arrays: must be one of 'rowmajor' or 'columnmajor'. #' @param Date how to encode Date objects: must be one of 'ISO8601' or 'epoch' #' @param POSIXt how to encode POSIXt (datetime) objects: must be one of 'string', 'ISO8601', 'epoch' or 'mongo' #' @param factor how to encode factor objects: must be one of 'string' or 'integer' #' @param complex how to encode complex numbers: must be one of 'string' or 'list' #' @param raw how to encode raw objects: must be one of 'base64', 'hex' or 'mongo' #' @param null how to encode NULL values within a list: must be one of 'null' or 'list' #' @param na how to print NA values: must be one of 'null' or 'string'. 
Defaults are class specific #' @param auto_unbox automatically \code{\link{unbox}} all atomic vectors of length 1. It is usually safer to avoid this and instead use the \code{\link{unbox}} function to unbox individual elements. #' An exception is that objects of class \code{AsIs} (i.e. wrapped in \code{I()}) are not automatically unboxed. This is a way to mark single values as length-1 arrays. #' @param digits max number of decimal digits to print for numeric values. Use \code{I()} to specify significant digits. #' @param force unclass/skip objects of classes with no defined JSON mapping #' @param pretty adds indentation whitespace to JSON output. Can be TRUE/FALSE or a number specifying the number of spaces to indent. See \code{\link{prettify}} #' @param ... arguments passed on to class specific \code{print} methods #' @references Jeroen Ooms (2014). The \code{jsonlite} Package: A Practical and Consistent Mapping Between JSON Data and \R{} Objects. \emph{arXiv:1403.2805}. \url{http://arxiv.org/abs/1403.2805} #' @examples # Stringify some data #' jsoncars <- toJSON(mtcars, pretty=TRUE) #' cat(jsoncars) #' #' # Parse it back #' fromJSON(jsoncars) #' #' # Parse escaped unicode #' fromJSON('{"city" : "Z\\u00FCrich"}') #' #' # Decimal vs significant digits #' toJSON(pi, digits=3) #' toJSON(pi, digits=I(3)) #' #' \dontrun{retrieve data frame #' data1 <- fromJSON("https://api.github.com/users/hadley/orgs") #' names(data1) #' data1$login #' #' # Nested data frames: #' data2 <- fromJSON("https://api.github.com/users/hadley/repos") #' names(data2) #' names(data2$owner) #' data2$owner$login #' #' # Flatten the data into a regular non-nested dataframe #' names(flatten(data2)) #' #' # Flatten directly (more efficient): #' data3 <- fromJSON("https://api.github.com/users/hadley/repos", flatten = TRUE) #' identical(data3, flatten(data2)) #' } fromJSON <- function(txt, simplifyVector = TRUE, simplifyDataFrame = simplifyVector, simplifyMatrix = simplifyVector, flatten = FALSE, ...) 
{ # check type if (!is.character(txt) && !is(txt, "connection")) { stop("Argument 'txt' must be a JSON string, URL or file.") } # overload for URL or path if (is.character(txt) && length(txt) == 1 && nchar(txt, type="bytes") < 10000) { if (grepl("^https?://", txt, useBytes=TRUE)) { loadpkg("curl") h <- curl::new_handle(useragent = paste("jsonlite /", R.version.string)) curl::handle_setheaders(h, Accept = "application/json, text/*, */*") txt <- curl::curl(txt, handle = h) } else if (file.exists(txt)) { # With files we can never know for sure the encoding. Lets try UTF8 first. # txt <- raw_to_json(readBin(txt, raw(), file.info(txt)$size)); txt <- file(txt) } } # call the actual function (with deprecated arguments) fromJSON_string(txt = txt, simplifyVector = simplifyVector, simplifyDataFrame = simplifyDataFrame, simplifyMatrix = simplifyMatrix, flatten = flatten, ...) } fromJSON_string <- function(txt, simplifyVector = TRUE, simplifyDataFrame = simplifyVector, simplifyMatrix = simplifyVector, flatten = FALSE, unicode = TRUE, validate = TRUE, bigint_as_char = FALSE, ...){ if(!missing(unicode)){ message("Argument unicode has been deprecated. YAJL always parses unicode.") } if(!missing(validate)){ message("Argument validate has been deprecated. 
YAJL automatically validates json while parsing.") } # parse obj <- parseJSON(txt, bigint_as_char) # post processing if (any(isTRUE(simplifyVector), isTRUE(simplifyDataFrame), isTRUE(simplifyMatrix))) { return(simplify(obj, simplifyVector = simplifyVector, simplifyDataFrame = simplifyDataFrame, simplifyMatrix = simplifyMatrix, flatten = flatten, ...)) } else { return(obj) } } jsonlite/R/cleannames.R0000644000176200001440000000032612540777273014552 0ustar liggesuserscleannames <- function(objnames){ objnames[objnames == ""] <- NA_character_ is_missing <- is.na(objnames) objnames[is_missing] <- as.character(seq_len(length(objnames)))[is_missing] make.unique(objnames) } jsonlite/R/serializeJSON.R0000644000176200001440000000405312540777273015126 0ustar liggesusers#' The \code{\link{serializeJSON}} and \code{\link{unserializeJSON}} functions convert between #' \R{} objects to JSON data. Instead of using a class based mapping like #' \code{\link{toJSON}} and \code{\link{fromJSON}}, the serialize functions base the encoding #' schema on the storage type, and capture all data and attributes from any object. #' Thereby the object can be restored almost perfectly from its JSON representation, but #' the resulting JSON output is very verbose. Apart from environments, all standard storage #' types are supported. #' #' @rdname serializeJSON #' @title serialize R objects to JSON #' @name serializeJSON #' @aliases serializeJSON unserializeJSON #' @export serializeJSON unserializeJSON #' @param x an \R{} object to be serialized #' @param digits max number of digits (after the dot) to print for numeric values #' @param pretty add indentation/whitespace to JSON output. See \code{\link{prettify}} #' @note JSON is a text based format which leads to loss of precision when printing numbers. 
#' @examples jsoncars <- serializeJSON(mtcars) #' mtcars2 <- unserializeJSON(jsoncars) #' identical(mtcars, mtcars2) #' #' set.seed('123') #' myobject <- list( #' mynull = NULL, #' mycomplex = lapply(eigen(matrix(-rnorm(9),3)), round, 3), #' mymatrix = round(matrix(rnorm(9), 3),3), #' myint = as.integer(c(1,2,3)), #' mydf = cars, #' mylist = list(foo='bar', 123, NA, NULL, list('test')), #' mylogical = c(TRUE,FALSE,NA), #' mychar = c('foo', NA, 'bar'), #' somemissings = c(1,2,NA,NaN,5, Inf, 7 -Inf, 9, NA), #' myrawvec = charToRaw('This is a test') #' ); #' identical(unserializeJSON(serializeJSON(myobject)), myobject); serializeJSON <- function(x, digits = 8, pretty = FALSE) { # just to verify that obj exists is(x) # we pass arguments both to asJSON as well as packaging object. asJSON(pack(x), digits = digits, indent = if (isTRUE(pretty)) 0L else NA_integer_) } #' @param txt a JSON string which was created using \code{serializeJSON} #' @rdname serializeJSON unserializeJSON <- function(txt) { unpack(parseJSON(txt)) } jsonlite/R/asJSON.AAAgeneric.R0000644000176200001440000000010712540777273015454 0ustar liggesuserssetGeneric("asJSON", function(x, ...) { standardGeneric("asJSON") }) jsonlite/R/asJSON.NULL.R0000644000176200001440000000032112540777273014305 0ustar liggesusers# Note that this is different from RJSONIO because null values are NA. setMethod("asJSON", "NULL", function(x, null = "list", ...) { if(null == "null"){ return("null") } else { return("{}") } }) jsonlite/R/asJSON.pairlist.R0000644000176200001440000000017112540777273015365 0ustar liggesuserssetOldClass("pairlist") setMethod("asJSON", "pairlist", function(x, ...) { asJSON(as.vector(x, mode = "list"), ...) }) jsonlite/R/prettify.R0000644000176200001440000000163212540777273014313 0ustar liggesusers#' Prettify adds indentation to a JSON string; minify removes all indentation/whitespace. 
#' #' @rdname prettify #' @title Prettify or minify a JSON string #' @name prettify, minify #' @aliases minify prettify #' @export prettify minify #' @param txt JSON string #' @param indent number of spaces to indent #' @useDynLib jsonlite R_reformat #' @examples myjson <- toJSON(cars) #' cat(myjson) #' prettify(myjson) #' minify(myjson) prettify <- function(txt, indent = 4) { txt <- paste(txt, collapse = "\n") reformat(txt, TRUE, indent_string = paste(rep(" ", as.integer(indent)), collapse="")) } #' @rdname prettify minify <- function(txt) { txt <- paste(txt, collapse = "\n") reformat(txt, FALSE) } reformat <- function(x, pretty, indent_string = ""){ out <- .Call(R_reformat, x, pretty, indent_string = indent_string); if(out[[1]] == 0) { return(out[[2]]) } else { stop(out[[2]], call.=FALSE) } } jsonlite/R/makesymbol.R0000644000176200001440000000041712540777273014610 0ustar liggesusers# Note: 'symbol' is the same thing as 'name' For some reason, as.name('') gives # an error, even though it is needed sometimes. This is a workaround makesymbol <- function(x) { if (missing(x) || nchar(x) == 0) { return(substitute()) } else { as.name(x) } } jsonlite/R/asJSON.int64.R0000644000176200001440000000021212602746171014466 0ustar liggesusers#setOldClass("int64") #setMethod("asJSON", "int64", function(x, digits, ...) { # asJSON(as.double(as.character(x)), digits = 0, ...) #}) jsonlite/R/asJSON.scalar.R0000644000176200001440000000046312540777273015007 0ustar liggesuserssetOldClass("scalar") setMethod("asJSON", "scalar", function(x, collapse, ...) 
{ # TODO: There must be a way to do this with NextMethod() if (length(class(x)) > 1) { class(x) <- class(x)[-1] } else { x <- unclass(x) } # Print JSON without [] return(asJSON(x, collapse = FALSE, ...)) }) jsonlite/R/asJSON.classRepresentation.R0000644000176200001440000000060512540777273017570 0ustar liggesusers# classRepresentation is an object that defines an S4 class encoding it usually # doesn't serve much purpose, however as we don't wnat to encode it as a regular # S4 data object. # it currently only encodes the slots. we could add encoding of methods of that # would be desired. setMethod("asJSON", "classRepresentation", function(x, ...) { return(asJSON(attributes(x)$slots, ...)) }) jsonlite/R/asJSON.AsIs.R0000644000176200001440000000062312540777273014377 0ustar liggesuserssetOldClass("AsIs") setMethod("asJSON", "AsIs", function(x, auto_unbox = FALSE, ...) { # Strip off the AsIs class so we can dispatch to other asJSON methods. class(x) <- setdiff(class(x), "AsIs") if (is.atomic(x) && length(x) == 1) { # Never auto_unbox single values when wrapped with I() asJSON(x, auto_unbox = FALSE, ...) } else { asJSON(x, auto_unbox = auto_unbox, ...) 
} }) jsonlite/R/is.recordlist.R0000644000176200001440000000113012540777273015222 0ustar liggesusers#' @useDynLib jsonlite C_is_recordlist is_recordlist_c <- function(x){ .Call(C_is_recordlist, x) } is_recordlist_r <- function(x) { if (!(is.unnamedlist(x) && length(x))) { return(FALSE) } at_least_one_object = FALSE for(i in x){ if(!(is.namedlist(i) || is.null(i))) return(FALSE) if(!at_least_one_object && is.namedlist(i)) at_least_one_object <- TRUE } return(at_least_one_object) } is.recordlist <- is_recordlist_c; is.namedlist <- function(x) { isTRUE(is.list(x) && !is.null(names(x))) } is.unnamedlist <- function(x) { isTRUE(is.list(x) && is.null(names(x))) } jsonlite/R/list_to_vec.R0000644000176200001440000000014512540777273014755 0ustar liggesuserslist_to_vec <- function(x) { return(unlist(null_to_na(x), recursive = FALSE, use.names = FALSE)) } jsonlite/R/unescape_unicode.R0000644000176200001440000000075612540777273015764 0ustar liggesusersunescape_unicode <- function(x){ #single string only stopifnot(is.character(x) && length(x) == 1) #find matches m <- gregexpr("(\\\\)+u[0-9a-z]{4}", x, ignore.case = TRUE) if(m[[1]][1] > -1){ #parse matches p <- vapply(regmatches(x, m)[[1]], function(txt){ gsub("\\", "\\\\", parse(text=paste0('"', txt, '"'))[[1]], fixed = TRUE, useBytes = TRUE) }, character(1), USE.NAMES = FALSE) #substitute parsed into original regmatches(x, m) <- list(p) } x } jsonlite/R/simplifyDataFrame.R0000644000176200001440000000513712540777273016052 0ustar liggesuserssimplifyDataFrame <- function(recordlist, columns, flatten, simplifyMatrix) { # no records at all if (!length(recordlist)) { if (!missing(columns)) { return(as.data.frame(matrix(ncol = length(columns), nrow = 0, dimnames = list(NULL, columns)))) } else { return(data.frame()) } } # only empty records and unknown columns if (!any(vapply(recordlist, length, integer(1), USE.NAMES = FALSE)) && missing(columns)) { return(data.frame(matrix(nrow = length(recordlist), ncol = 0))) } # find columns if 
not specified if (missing(columns)) { columns <- unique(unlist(lapply(recordlist, names), recursive = FALSE, use.names = FALSE)) } # make new recordlist with requested only requested values #recordlist <- lapply(recordlist, function(x) { # # a new record with each requested column # x <- as.list(x)[columns] # names(x) <- columns # x #}) # Convert row lists to column lists. This is the heavy lifting columnlist <- lapply(columns, function(x) lapply(recordlist, "[[", x)) # simplify vectors and nested data frames columnlist <- lapply(columnlist, simplify, simplifyVector = TRUE, simplifyDataFrame = TRUE, simplifyMatrix = FALSE, simplifySubMatrix = simplifyMatrix, flatten = flatten) # check that all elements have equal length columnlengths <- unlist(vapply(columnlist, function(z) { ifelse(length(dim(z)) > 1, nrow(z), length(z)) }, integer(1))) n <- unique(columnlengths) if (length(n) > 1) { stop("Elements not of equal length: ", paste(columnlengths, collapse = " ")) } # add the column names before flattening names(columnlist) <- columns # flatten nested data frames if(isTRUE(flatten)) { dfcolumns <- vapply(columnlist, is.data.frame, logical(1)) if(any(dfcolumns)){ columnlist <- c(columnlist[!dfcolumns], do.call(c, columnlist[dfcolumns])) } } # make into data frame class(columnlist) <- "data.frame" # set row names if("_row" %in% names(columnlist)) { rn <- columnlist[["_row"]]; columnlist["_row"] <- NULL; # row.names() casts double to character which is undesired. if(is.double(rn)) { rn <- as.integer(rn); } # data frames MUST have row names if(any(duplicated(rn))){ warning('Duplicate names in "_row" field. Data frames must have unique row names.', call. 
= FALSE) if(is.character(rn)) { row.names(columnlist) <- make.unique(rn) } else { row.names(columnlist) <- seq_len(n) } } else { row.names(columnlist) <- rn; } } else { row.names(columnlist) <- seq_len(n) } return(columnlist) } jsonlite/R/pack.R0000644000176200001440000000723612540777273013371 0ustar liggesusers# Note: For S4, the value is the class defintion. The slots (data) are in the # attributes. pack <- function(obj, ...) { # encode by storage mode encoding.mode <- typeof(obj) # needed because formals become attributes, etc if (encoding.mode == "closure") { obj <- as.list(obj) } # special exception if (encoding.mode == "environment" && isNamespace(obj)) { encoding.mode <- "namespace" } # encode recursively list( type = as.scalar(encoding.mode), attributes = givename(lapply(attributes(obj), pack, ...)), value = switch(encoding.mode, `NULL` = obj, environment = NULL, externalptr = NULL, namespace = lapply(as.list(getNamespaceInfo(obj, "spec")), as.scalar), S4 = list(class = as.scalar(as.character(attr(obj, "class"))), package = as.scalar(attr(attr(obj, "class"), "package"))), raw = as.scalar(base64_encode(unclass(obj))), logical = as.vector(unclass(obj), mode = "logical"), integer = as.vector(unclass(obj), mode = "integer"), numeric = as.vector(unclass(obj), mode = "numeric"), double = as.vector(unclass(obj), mode = "double"), character = as.vector(unclass(obj), mode = "character"), complex = as.vector(unclass(obj), mode = "complex"), list = unname(lapply(unclass(obj), pack, ...)), pairlist = unname(lapply(as.vector(obj, mode = "list"), pack, ...)), closure = unname(lapply(obj, pack, ...)), builtin = as.scalar(base64_encode(serialize(unclass(obj), NULL))), special = as.scalar(base64_encode(serialize(unclass(obj), NULL))), language = deparse(unclass(obj)), name = deparse(unclass(obj)), symbol = deparse(unclass(obj)), expression = deparse(obj[[1]]), warning("No encoding has been defined for objects with storage mode ", encoding.mode, " and will be skipped.") ) ) 
} unpack <- function(obj) { encoding.mode <- obj$type newdata <- c( list(.Data = switch(encoding.mode, `NULL` = NULL, environment = new.env(parent=emptyenv()), namespace = getNamespace(obj$value$name), externalptr = NULL, S4 = getClass(obj$value$class, where = getNamespace(obj$value$package)), raw = base64_decode(obj$value), logical = as.logical(list_to_vec(obj$value)), integer = as.integer(list_to_vec(obj$value)), numeric = as.numeric(list_to_vec(obj$value)), double = as.double(list_to_vec(obj$value)), character = as.character(list_to_vec(obj$value)), complex = as.complex(list_to_vec(obj$value)), list = lapply(obj$value, unpack), pairlist = lapply(obj$value, unpack), symbol = makesymbol(x = unlist(obj$value)), name = makesymbol(x = unlist(obj$value)), expression = parse(text = obj$value), language = as.call(parse(text = unlist(obj$value)))[[1]], special = unserialize(base64_decode(obj$value)), builtin = unserialize(base64_decode(obj$value)), closure = lapply(obj$value, unpack), stop("Switch falling through for encode.mode: ", encoding.mode) ) ), lapply(obj$attributes, unpack)) # this is for serializing functions arguments: as.list(lm)$data if (identical(newdata[[1]], substitute())) { return(substitute()) } # build the output object output <- do.call("structure", newdata, quote = TRUE) # functions are special if (encoding.mode == "closure") { myfn <- as.function(output) environment(myfn) <- globalenv() return(myfn) } # functions are special if (encoding.mode == "pairlist") { return(as.pairlist(output)) } # try to fix native symbols if (is(output, "NativeSymbolInfo")) { try(output <- fixNativeSymbol(output)) } # return return(output) } jsonlite/R/rbind.pages.R0000644000176200001440000000531112540777273014637 0ustar liggesusers#' Combine pages into a single data frame #' #' The \code{rbind.pages} function is used to combine a list of data frames into a single #' data frame. 
This is often needed when working with a JSON API that limits the amount #' of data per request. If we need more data than what fits in a single request, we need to #' perform multiple requests that each retrieve a fragment of data, not unlike pages in a #' book. In practice this is often implemented using a \code{page} parameter in the API. The #' \code{rbind.pages} function can be used to combine these pages back into a single dataset. #' #' The \code{\link{rbind.pages}} function generalizes \code{\link[base:rbind]{base::rbind}} and #' \code{\link[plyr:rbind.fill]{plyr::rbind.fill}} with added support for nested data frames. Not each column #' has to be present in each of the individual data frames; missing columns will be filled #' up in \code{NA} values. #' #' @export #' @param pages a list of data frames, each representing a \emph{page} of data #' @examples # Basic example #' x <- data.frame(foo = rnorm(3), bar = c(TRUE, FALSE, TRUE)) #' y <- data.frame(foo = rnorm(2), col = c("blue", "red")) #' rbind.pages(list(x, y)) #' #' \dontrun{ #' baseurl <- "http://projects.propublica.org/nonprofits/api/v1/search.json" #' pages <- list() #' for(i in 0:20){ #' mydata <- fromJSON(paste0(baseurl, "?order=revenue&sort_order=desc&page=", i)) #' message("Retrieving page ", i) #' pages[[i+1]] <- mydata$filings #' } #' filings <- rbind.pages(pages) #' nrow(filings) #' colnames(filings) #' } rbind.pages <- function(pages){ #Load plyr loadpkg("plyr") #validate input stopifnot(is.list(pages)) # edge case if(!length(pages)){ return(data.frame()) } # All elements must be data frames or NULL. 
pages <- Filter(function(x) {!is.null(x)}, pages); stopifnot(all(vapply(pages, is.data.frame, logical(1)))) # Extract data frame column names dfdf <- lapply(pages, vapply, is.data.frame, logical(1)) dfnames <- unique(names(which(unlist(dfdf)))) # No sub data frames if(!length(dfnames)){ return(plyr::rbind.fill(pages)) } # Extract the nested data frames subpages <- lapply(dfnames, function(colname){ rbind.pages(lapply(pages, function(df) { if(!is.null(df[[colname]])) df[[colname]] else as.data.frame(matrix(nrow=nrow(df), ncol=0)) })) }) # Remove data frame columns pages <- lapply(pages, function(df){ issubdf <- vapply(df, is.data.frame, logical(1)) if(any(issubdf)) df[issubdf] <- rep(NA, nrow(df)) df }) # Bind rows outdf <- plyr::rbind.fill(pages) # Combine wih sub dataframes for(i in seq_along(subpages)){ outdf[[dfnames[i]]] <- subpages[[i]] } #out outdf } jsonlite/R/flatten.R0000644000176200001440000000263612540777273014107 0ustar liggesusers#' Flatten nested data frames #' #' In a nested data frame, one or more of the columns consist of another data #' frame. These structures frequently appear when parsing JSON data from the web. #' We can flatten such data frames into a regular 2 dimensional tabular structure. 
#' #' @export #' @param x a data frame #' @param recursive flatten recursively #' @examples options(stringsAsFactors=FALSE) #' x <- data.frame(driver = c("Bowser", "Peach"), occupation = c("Koopa", "Princess")) #' x$vehicle <- data.frame(model = c("Piranha Prowler", "Royal Racer")) #' x$vehicle$stats <- data.frame(speed = c(55, 34), weight = c(67, 24), drift = c(35, 32)) #' str(x) #' str(flatten(x)) #' str(flatten(x, recursive = FALSE)) #' #' \dontrun{ #' data1 <- fromJSON("https://api.github.com/users/hadley/repos") #' colnames(data1) #' colnames(data1$owner) #' colnames(flatten(data1)) #' #' # or for short: #' data2 <- fromJSON("https://api.github.com/users/hadley/repos", flatten = TRUE) #' colnames(data2) #' } #' flatten <- function(x, recursive = TRUE){ stopifnot(is.data.frame(x)) nr <- nrow(x) dfcolumns <- vapply(x, is.data.frame, logical(1)) if(!any(dfcolumns)){ return(x) } x <- if(recursive){ c(x[!dfcolumns], do.call(c, lapply(x[dfcolumns], flatten))) } else { c(x[!dfcolumns], do.call(c, x[dfcolumns])) } class(x) <- "data.frame" row.names(x) <- if(!nr) character(0) else 1:nr; x } #1,2,3,df1,5,6,7,df2,9 jsonlite/R/raw_to_json.R0000644000176200001440000000101012540777273014757 0ustar liggesusers# This function deals with some uncertainty in character encoding when reading # from files and URLs. It tries UTF8 first, but falls back on native if it is # certainly not UTF8. raw_to_json <- function(x){ txt <- rawToChar(x); Encoding(txt) <- "UTF-8" isvalid <- validate(txt) if(!isvalid && grepl("invalid bytes in UTF8", attr(isvalid, "err"), fixed=TRUE, useBytes=TRUE)){ warning("The json string is not valid UTF-8. Assuming native encoding.", call. 
= FALSE) Encoding(txt) <- ""; } return(txt) } jsonlite/R/apply_by_pages.R0000644000176200001440000000145112540777273015442 0ustar liggesusersapply_by_pages <- function(x, FUN, pagesize, verbose, ...){ stopifnot(is.data.frame(x)) nr <- nrow(x) npages <- nr %/% pagesize; lastpage <- nr %% pagesize; for(i in seq_len(npages)){ from <- pagesize * (i-1) + 1; to <- pagesize * i FUN(x[from:to, ,drop = FALSE], ...) if(verbose) cat("\rProcessed", i * pagesize, "rows...") } if(lastpage){ from <- nr - lastpage + 1; FUN(x[from:nr, ,drop = FALSE], ...) } if(verbose) cat("\rComplete! Processed total of", nr, "rows.\n") invisible(); } #this is another slightly slower implementation apply_by_pages2 <- function(x, FUN, pagesize, verbose, ...){ x2 <- split(x, seq_len(nrow(x)) %/% pagesize) for(page in x2){ if(verbose) message("Writing ", nrow(page), " lines (", ").") FUN(page) } invisible() } jsonlite/R/asJSON.raw.R0000644000176200001440000000065212540777273014333 0ustar liggesuserssetMethod("asJSON", "raw", function(x, raw = c("base64", "hex", "mongo"), ...) { # validate raw <- match.arg(raw) # encode based on schema if (raw == "mongo") { return(asJSON(list(`$binary` = as.scalar(base64_encode(x)), `$type` = as.scalar("5")))) } else if (raw == "hex") { return(asJSON(as.character.hexmode(x), ...)) } else { # no as scalar here! 
return(asJSON(base64_encode(x), ...)) } }) jsonlite/R/simplify.R0000644000176200001440000000764112540777273014307 0ustar liggesuserssimplify <- function(x, simplifyVector = TRUE, simplifyDataFrame = TRUE, simplifyMatrix = TRUE, simplifyDate = simplifyVector, homoList = TRUE, flatten = FALSE, columnmajor = FALSE, simplifySubMatrix = simplifyMatrix) { #This includes '[]' and '{}') if (!is.list(x) || !length(x)) { return(x) } # list can be a dataframe recordlist if (isTRUE(simplifyDataFrame) && is.recordlist(x)) { mydf <- simplifyDataFrame(x, flatten = flatten, simplifyMatrix = simplifySubMatrix) if(isTRUE(simplifyDate) && is.data.frame(mydf) && is.datelist(mydf)){ return(structure(mydf[["$date"]]/1000, class=c("POSIXct", "POSIXt"))) } return(mydf) } # or a scalar list (atomic vector) if (isTRUE(simplifyVector) && is.null(names(x)) && is.scalarlist(x)) { return(list_to_vec(x)) } # apply recursively out <- lapply(x, simplify, simplifyVector = simplifyVector, simplifyDataFrame = simplifyDataFrame, simplifyMatrix = simplifySubMatrix, columnmajor = columnmajor, flatten = flatten) # fix for mongo style dates turning into scalars *after* simplifying # only happens when simplifyDataframe=FALSE if(isTRUE(simplifyVector) && is.scalarlist(out) && all(vapply(out, is, logical(1), "POSIXt"))){ return(structure(list_to_vec(out), class=c("POSIXct", "POSIXt"))) } # test for matrix. Note that we have to take another look at x (before # list_to_vec on its elements) to differentiate between matrix and vector. 
if (isTRUE(simplifyMatrix) && isTRUE(simplifyVector) && is.matrixlist(out) && all(unlist(vapply(x, is.scalarlist, logical(1))))) { if(isTRUE(columnmajor)){ return(do.call(cbind, out)) } else { #this is currently the default return(do.call(rbind, out)) } } # Simplify higher arrays if (isTRUE(simplifyMatrix) && is.arraylist(out)){ if(isTRUE(columnmajor)){ return(array( data = do.call(cbind, out), dim = c(dim(out[[1]]), length(out)) )); } else { #this is currently the default return(array( data = do.call(rbind, lapply(out, as.vector)), dim = c(length(out), dim(out[[1]])) )); } } # try to enfoce homoList on unnamed lists if (isTRUE(homoList) && is.null(names(out))) { # coerse empty lists, caused by the ambiguous fromJSON('[]') isemptylist <- vapply(out, identical, logical(1), list()) if (any(isemptylist) & !all(isemptylist)) { # if all the others look like data frames, coerse to data frames! if (all(vapply(out[!isemptylist], is.data.frame, logical(1)))) { for (i in which(isemptylist)) { out[[i]] <- data.frame() } return(out) } # if all others look like atomic vectors, unlist all if (all(vapply(out[!isemptylist], function(z) { isTRUE(is.vector(z) && is.atomic(z)) }, logical(1)))) { for (i in which(isemptylist)) { out[[i]] <- vector(mode = typeof(out[[which(!isemptylist)[1]]])) } return(out) } } } # convert date object if( isTRUE(simplifyDate) && is.datelist(out) ){ return(structure(out[["$date"]]/1000, class=c("POSIXct", "POSIXt"))) } # return object return(out) } is.matrixlist <- function(x) { isTRUE(is.list(x) && length(x) && is.null(names(x)) && all(vapply(x, is.atomic, logical(1))) && all.identical(vapply(x, length, integer(1))) #&& all.identical(vapply(x, mode, character(1))) #this fails for: [ [ 1, 2 ], [ "NA", "NA" ] ] ); } is.arraylist <- function(x) { isTRUE(is.list(x) && length(x) && is.null(names(x)) && all(vapply(x, is.array, logical(1))) && all.identical(vapply(x, function(y){paste(dim(y), collapse="-")}, character(1))) ); } is.datelist <- function(x){ 
isTRUE(is.list(x) && identical(names(x), "$date") && is.numeric(x[["$date"]]) ); } all.identical <- function(x){ if(!length(x)) return(FALSE) for(i in x){ if(x[1] != i) return(FALSE) } return(TRUE) } jsonlite/R/num_to_char.R0000644000176200001440000000222512540777273014742 0ustar liggesusers#' @useDynLib jsonlite R_num_to_char num_to_char <- function(x, digits = NA, na_as_string = NA, use_signif = FALSE){ if(is.na(digits)) digits <- NA_integer_; stopifnot(is.numeric(x)) stopifnot(is.numeric(digits)) stopifnot(is.logical(na_as_string)) .Call(R_num_to_char, x, digits, na_as_string, use_signif) } #' @useDynLib jsonlite R_integer64_to_char integer64_to_char <- function(x, na_as_string = TRUE){ .Call(R_integer64_to_char, x, na_as_string) } num_to_char_R <- function(x, digits = NA, na_as_string = NA){ if(is.na(digits)) digits <- NA_integer_; stopifnot(is.numeric(x)) stopifnot(is.numeric(digits)) stopifnot(is.logical(na_as_string)) if(!is.integer(x) && !is.null(digits) && !is.na(digits)){ x <- round(x, digits) } #convert to strings tmp <- as.character(x) # in numeric variables, NA, NaN, Inf are replaced by character strings if (any(missings <- which(!is.finite(x)))) { if(is.na(na_as_string)){ tmp[missings] <- NA_character_; } else if(na_as_string){ tmp[missings] <- wrapinquotes(x[missings]) } else { tmp[missings] <- "null" } } #returns a character vector return(tmp) } jsonlite/R/asJSON.ts.R0000644000176200001440000000026712540777273014172 0ustar liggesusers#this is a placeholder for something better, hopefully #I have no idea what is appropriate for time series setMethod("asJSON", "ts", function(x, ...) { asJSON(as.vector(x), ...) 
}) jsonlite/R/fixNativeSymbol.R0000644000176200001440000000142012540777273015563 0ustar liggesusersfixNativeSymbol <- function(symbol) { if (is(symbol, "NativeSymbolInfo")) { # method depends on version rVersion <- getRversion() if (rVersion >= "3.0") { # in R 3.0 determine the dll that the symbol lives in name <- ifelse(is.null(symbol$package), symbol$dll[["name"]], symbol$package[["name"]]) # load package if not yet loaded try(getNamespace(name)) pkgDLL <- getLoadedDLLs()[[name]] # reconstruct the native symbol address newsymbol <- getNativeSymbolInfo(name = symbol$name, PACKAGE = pkgDLL, withRegistrationInfo = TRUE) symbol$address <- newsymbol$address return(symbol) } else if (rVersion >= "2.14") { return(getNativeSymbolInfo(symbol$name)) } } else { return(symbol) } } jsonlite/R/is.scalarlist.R0000644000176200001440000000044312540777273015217 0ustar liggesusersis_scalarlist_r <- function(x) { if(!is.list(x)) return(FALSE) for(i in x){ if(!is.atomic(i) || length(i) > 1) return(FALSE) } return(TRUE) } #' @useDynLib jsonlite C_is_scalarlist is_scalarlist_c <- function(x){ .Call(C_is_scalarlist, x) } is.scalarlist <- is_scalarlist_c; jsonlite/R/null_to_na.R0000644000176200001440000000124312540777273014575 0ustar liggesusers#' @useDynLib jsonlite C_null_to_na null_to_na_c <- function(x) { .Call(C_null_to_na, x) } null_to_na_r <- function(x){ if (!length(x)) { return(vector()) } #Start parsing missing values x2 <- x looks_like_character_vector = FALSE for(i in seq_along(x2)){ if(is.character(x2[[i]])){ x2[[i]] <- switch(x2[[i]], "NA" = NA, "NaN" = NaN, "Inf" = Inf, "-Inf" = -Inf, {looks_like_character_vector=TRUE; break} ) } } # Set x if(!looks_like_character_vector){ x <- x2 } # Convert NULL to NA x[vapply(x, is.null, logical(1))] <- NA #return return(x) } null_to_na <- null_to_na_c; jsonlite/R/print.R0000644000176200001440000000054512540777273013603 0ustar liggesusers#' @method print json #' @export print.json <- function(x, ...){ cat(x, "\n") } #' @method print 
scalar #' @export print.scalar <- function(x, ...){ original <- x; class(x) <- class(x)[-1] if(is.data.frame(x)){ row.names(x) <- "[x]" print(x) } else { cat("[x] ", asJSON(x, collapse = FALSE), "\n", sep="") } invisible(original) } jsonlite/R/asJSON.numeric.R0000644000176200001440000000213412572253334015171 0ustar liggesuserssetMethod("asJSON", "numeric", function(x, digits = 5, use_signif = is(digits, "AsIs"), na = c("string", "null", "NA"), auto_unbox = FALSE, collapse = TRUE, keep_vec_names = FALSE, indent = NA_integer_, ...) { # shiny legacy exception if(isTRUE(keep_vec_names) && length(names(x))){ warn_keep_vec_names() return(asJSON(as.list(x), digits = digits, use_signif = use_signif, na = na, auto_unbox = TRUE, collapse = collapse, ...)) } na <- match.arg(na); na_as_string <- switch(na, "string" = TRUE, "null" = FALSE, "NA" = NA, stop("invalid na_as_string") ) # old R implementation # tmp <- num_to_char_R(x, digits, na_as_string); # fast C implementation tmp <- if(is(x, "integer64")){ integer64_to_char(x, na_as_string) } else { num_to_char(x, digits, na_as_string, use_signif); } if(isTRUE(auto_unbox) && length(tmp) == 1){ return(tmp); } if(collapse){ collapse(tmp, indent = indent) } else { tmp } }) # This is for the bit64 package setOldClass("integer64") setMethod("asJSON", "integer64", getMethod("asJSON", "numeric")); jsonlite/R/asJSON.json.R0000644000176200001440000000037412540777273014514 0ustar liggesusers# If an object has already been encoded by toJSON(), do not encode it again setOldClass("json") setMethod("asJSON", "json", function(x, json_verbatim = FALSE, ...) { if(isTRUE(json_verbatim)){ x } else { asJSON(as.character(x), ...) 
} }) jsonlite/R/utf8conv.R0000644000176200001440000000010712540777273014215 0ustar liggesusersutf8conv <- function(x) { gsub("","\\\\u\\1",x) } jsonlite/R/parseJSON.R0000644000176200001440000000055712540777273014256 0ustar liggesusersparseJSON <- function(txt, bigint_as_char = FALSE) { if(is(txt, "connection")){ parse_con(txt, 1024^2, bigint_as_char) } else { parse_string(txt, bigint_as_char) } } #' @useDynLib jsonlite R_parse parse_string <- function(txt, bigint_as_char){ if (length(txt) > 1) { txt <- paste(txt, collapse = "\n") } .Call(R_parse, txt, bigint_as_char) } jsonlite/R/loadpkg.R0000644000176200001440000000030612540777273014063 0ustar liggesusersloadpkg <- function(pkg){ tryCatch(getNamespace(pkg), error = function(e) { stop("Required package ", pkg, " not found. Please run: install.packages('", pkg, "')", call. = FALSE) }) } jsonlite/R/warn_keep_vec_names.R0000644000176200001440000000053212540777273016436 0ustar liggesuserswarn_keep_vec_names <- function() { message("Input to asJSON(keep_vec_names=TRUE) is a named vector. ", "In a future version of jsonlite, this option will not be supported, ", "and named vectors will be translated into arrays instead of objects. ", "If you want JSON object output, please use a named list instead. See ?toJSON.") } jsonlite/R/helpfunctions.R0000644000176200001440000000135012540777273015323 0ustar liggesusers# S4 to list object. Not quite sure if this really works in general. You probably # shouldn't use S4 instances with JSON anyway because you don't know the class # definition. 
S4tolist <- function(x) { structure(lapply(slotNames(x), slot, object = x), .Names = slotNames(x)) } # ENCODING TOOLS # opposite of unname: force list into named list to get key/value json encodings givename <- function(obj) { return(structure(obj, names = as.character(names(obj)))) } # trim whitespace trim <- function(x) { gsub("(^[[:space:]]+|[[:space:]]+$)", "", x) } # put double quotes around a string wrapinquotes <- function(x) { paste("\"", x, "\"", sep = "") } # DECODING TOOLS evaltext <- function(text) { return(eval(parse(text = text))) } jsonlite/R/toJSON.R0000644000176200001440000000256212626136244013554 0ustar liggesusers#' @rdname fromJSON toJSON <- function(x, dataframe = c("rows", "columns", "values"), matrix = c("rowmajor", "columnmajor"), Date = c("ISO8601", "epoch"), POSIXt = c("string", "ISO8601", "epoch", "mongo"), factor = c("string", "integer"), complex = c("string", "list"), raw = c("base64", "hex", "mongo"), null = c("list", "null"), na = c("null", "string"), auto_unbox = FALSE, digits = 4, pretty = FALSE, force = FALSE, ...) { # validate args dataframe <- match.arg(dataframe) matrix <- match.arg(matrix) Date <- match.arg(Date) POSIXt <- match.arg(POSIXt) factor <- match.arg(factor) complex <- match.arg(complex) raw <- match.arg(raw) null <- match.arg(null) # force x <- force(x) # edge case because 'null' in itself is not valid json if(is.null(x)){ null <- "list" } #this is just to check, we keep method-specific defaults if(!missing(na)){ na <- match.arg(na) } else { na <- NULL } indent <- if (isTRUE(pretty)) 0L else NA_integer_ # dispatch ans <- asJSON(x, dataframe = dataframe, Date = Date, POSIXt = POSIXt, factor = factor, complex = complex, raw = raw, matrix = matrix, auto_unbox = auto_unbox, digits = digits, na = na, null = null, force = force, indent = indent, ...) 
#prettify with yajl if(is.numeric(pretty)) { prettify(ans, pretty) } else { class(ans) <- "json" return(ans) } } jsonlite/R/asJSON.complex.R0000644000176200001440000000127012540777273015206 0ustar liggesuserssetMethod("asJSON", "complex", function(x, digits = 5, collapse = TRUE, complex = c("string", "list"), na = c("string", "null", "NA"), oldna = NULL, ...) { # validate na <- match.arg(na); complex <- match.arg(complex) #turn into strings if (complex == "string") { #default NA is "NA" mystring <- prettyNum(x = x, digits = digits) if (any(missings <- which(!is.finite(x)))){ if (na %in% c("null", "NA")) { mystring[missings] <- NA_character_; } } asJSON(mystring, collapse = collapse, na = na, ...) } else { if(na == "NA"){ na <- oldna; } asJSON(list(real = Re(x), imaginary = Im(x)), na = na, digits = digits, ...) } }) jsonlite/R/as.scalar.R0000644000176200001440000000120212540777273014305 0ustar liggesusersas.scalar <- function(obj) { # Lists can never be a scalar (this can arise if a dataframe contains a column # with lists) if(length(dim(obj)) > 1){ if(!identical(nrow(obj), 1L)){ warning("Tried to use as.scalar on an array or dataframe with ", nrow(obj), " rows.", call.=FALSE) return(obj) } } else if(!identical(length(obj), 1L)) { warning("Tried to use as.scalar on an object of length ", length(obj), call.=FALSE) return(obj) } else if(is.namedlist(obj)){ warning("Tried to use as.scalar on a named list.", call.=FALSE) return(obj) } class(obj) <- c("scalar", class(obj)) return(obj) } jsonlite/R/base64.R0000644000176200001440000000121712540777273013530 0ustar liggesusers# These functions have been taken from the base64 package by Francois Romain. It # was easier to copy then to import. 
They will not be exported base64_decode <- function(input) { stopifnot(is.character(input)) inputtf <- tempfile() writeLines(input, inputtf) output <- tempfile() invisible(.Call("base64_decode_", inputtf, output)) readBin(output, "raw", file.info(output)$size) } base64_encode <- function(input, linesize = 1e+09) { stopifnot(is.raw(input)) inputtf <- tempfile() writeBin(input, inputtf) output <- tempfile() invisible(.Call("base64_encode_", inputtf, output, as.integer(linesize))) return(readLines(output)) } jsonlite/R/asJSON.function.R0000644000176200001440000000037612540777273015372 0ustar liggesuserssetMethod("asJSON", "function", function(x, collapse = TRUE, fun = c("source", "list"), ...) { # validate fun <- match.arg(fun) if (fun == "source") { return(asJSON(deparse(x), ...)) } else { return(asJSON(as.list(x), ...)) } }) jsonlite/R/stream.R0000644000176200001440000002043412621624023013721 0ustar liggesusers#' Streaming JSON input/output #' #' The \code{stream_in} and \code{stream_out} functions implement line-by-line processing #' of JSON data over a \code{\link{connection}}, such as a socket, url, file or pipe. JSON #' streaming requires the \href{http://ndjson.org}{ndjson} format, which slightly differs #' from \code{\link{fromJSON}} and \code{\link{toJSON}}, see details. #' #' Because parsing huge JSON strings is difficult and inefficient, JSON streaming is done #' using \strong{lines of minified JSON records}, a.k.a. \href{http://ndjson.org}{ndjson}. #' This is pretty standard: JSON databases such as \href{https://github.com/maxogden/dat}{dat} #' or MongoDB use the same format to import/export datasets. Note that this means that the #' total stream combined is not valid JSON itself; only the individual lines are. Also note #' that because line-breaks are used as separators, prettified JSON is not permitted: the #' JSON lines \emph{must} be minified. 
In this respect, the format is a bit different from #' \code{\link{fromJSON}} and \code{\link{toJSON}} where all lines are part of a single JSON #' structure with optional line breaks. #' #' The \code{handler} is a callback function which is called for each page (batch) of #' JSON data with exactly one argument (usually a data frame with \code{pagesize} rows). #' If \code{handler} is missing or \code{NULL}, a default handler is used which stores all #' intermediate pages of data, and at the very end binds all pages together into one single #' data frame that is returned by \code{stream_in}. When a custom \code{handler} function #' is specified, \code{stream_in} does not store any intermediate results and always returns #' \code{NULL}. It is then up to the \code{handler} to process or store data pages. #' A \code{handler} function that does not store intermediate results in memory (for #' example by writing output to another connection) results in a pipeline that can process an #' unlimited amount of data. See example. #' #' If a connection is not opened yet, \code{stream_in} and \code{stream_out} #' will automatically open and later close the connection. Because R destroys connections #' when they are closed, they cannot be reused. To use a single connection for multiple #' calls to \code{stream_in} or \code{stream_out}, it needs to be opened #' beforehand. See example. #' #' @param con a \code{\link{connection}} object. If the connection is not open, #' \code{stream_in} and \code{stream_out} will automatically open #' and later close (and destroy) the connection. See details. #' @param handler a custom function that is called on each page of JSON data. If not specified, #' the default handler stores all pages and binds them into a single data frame that will be #' returned by \code{stream_in}. See details. #' @param x object to be streamed out. Currently only data frames are supported. 
#' @param pagesize number of lines to read/write from/to the connection per iteration. #' @param verbose print some information on what is going on. #' @param ... arguments for \code{\link{fromJSON}} and \code{\link{toJSON}} that #' control JSON formatting/parsing where applicable. Use with caution. #' @name stream_in, stream_out #' @aliases stream_in stream_out #' @export stream_in stream_out #' @rdname stream_in #' @references MongoDB export format: \url{http://docs.mongodb.org/manual/reference/program/mongoexport/#cmdoption--query} #' @references Documentation for the JSON Lines text file format: \url{http://jsonlines.org/} #' @return The \code{stream_out} function always returns \code{NULL}. #' When no custom handler is specified, \code{stream_in} returns a data frame of all pages binded together. #' When a custom handler function is specified, \code{stream_in} always returns \code{NULL}. #' @examples # compare formats #' x <- iris[1:3,] #' toJSON(x) #' stream_out(x) #' #' # Trivial example #' mydata <- stream_in(url("http://httpbin.org/stream/100")) #' #' \dontrun{stream large dataset to file and back #' library(nycflights13) #' stream_out(flights, file(tmp <- tempfile())) #' flights2 <- stream_in(file(tmp)) #' unlink(tmp) #' all.equal(flights2, as.data.frame(flights)) #' #' # stream over HTTP #' diamonds2 <- stream_in(url("http://jeroenooms.github.io/data/diamonds.json")) #' #' # stream over HTTP with gzip compression #' flights3 <- stream_in(gzcon(url("http://jeroenooms.github.io/data/nycflights13.json.gz"))) #' all.equal(flights3, as.data.frame(flights)) #' #' # stream over HTTPS (HTTP+SSL) via curl #' library(curl) #' flights4 <- stream_in(gzcon(curl("https://jeroenooms.github.io/data/nycflights13.json.gz"))) #' all.equal(flights4, as.data.frame(flights)) #' #' # or alternatively: #' flights5 <- stream_in(gzcon(pipe("curl https://jeroenooms.github.io/data/nycflights13.json.gz"))) #' all.equal(flights5, as.data.frame(flights)) #' #' # Full JSON IO stream 
from URL to file connection. #' # Calculate delays for flights over 1000 miles in batches of 5k #' library(dplyr) #' con_in <- gzcon(url("http://jeroenooms.github.io/data/nycflights13.json.gz")) #' con_out <- file(tmp <- tempfile(), open = "wb") #' stream_in(con_in, handler = function(df){ #' df <- dplyr::filter(df, distance > 1000) #' df <- dplyr::mutate(df, delta = dep_delay - arr_delay) #' stream_out(df, con_out, pagesize = 1000) #' }, pagesize = 5000) #' close(con_out) #' #' # stream it back in #' mydata <- stream_in(file(tmp)) #' nrow(mydata) #' unlink(tmp) #' #' # Data from http://openweathermap.org/current#bulk #' # Each row contains a nested data frame. #' daily14 <- stream_in(gzcon(url("http://78.46.48.103/sample/daily_14.json.gz")), pagesize=50) #' subset(daily14, city$name == "Berlin")$data[[1]] #' #' # Or with dplyr: #' library(dplyr) #' daily14f <- flatten(daily14) #' filter(daily14f, city.name == "Berlin")$data[[1]] #' #' # Stream import large data from zip file #' tmp <- tempfile() #' download.file("http://jsonstudio.com/wp-content/uploads/2014/02/companies.zip", tmp) #' companies <- stream_in(unz(tmp, "companies.json")) #' } stream_in <- function(con, handler = NULL, pagesize = 500, verbose = TRUE, ...) { # Maybe also handle URLs here in future. 
if(!is(con, "connection")){ stop("Argument 'con' must be a connection.") } # Same as mongolite count <- 0 cb <- if(is.null(handler)){ out <- new.env() function(x){ if(length(x)){ count <<- count + length(x) out[[as.character(count)]] <<- x } } } else { if(verbose) message("using a custom handler function.") function(x){ handler(post_process(x)) count <<- count + length(x) } } if(!isOpen(con, "r")){ if(verbose) message("opening ", is(con) ," input connection.") # binary connection prevents recoding of utf8 to latin1 on windows open(con, "rb") on.exit({ if(verbose) message("closing ", is(con) ," input connection.") close(con) }) } # Read data page by page repeat { page <- readLines(con, n = pagesize, encoding = "UTF-8") if(length(page)){ page <- Filter(nchar, page) cb(lapply(page, parseJSON)) if(verbose) cat("\r Found", count, "records...") } if(length(page) < pagesize) break } # Either return a big data frame, or nothing. if(is.null(handler)){ if(verbose) cat("\r Imported", count, "records. Simplifying into dataframe...\n") out <- as.list(out, sorted = FALSE) post_process(unlist(out[order(as.numeric(names(out)))], FALSE, FALSE)) } else { invisible() } } post_process <- function(x){ as.data.frame(simplify(x)) } #' @rdname stream_in stream_out <- function(x, con = stdout(), pagesize = 500, verbose = TRUE, ...) { if(!is(con, "connection")){ # Maybe handle URLs here in future. stop("Argument 'con' must be a connection.") } if(!isOpen(con, "w")){ if(verbose) message("opening ", is(con) ," output connection.") open(con, "wb") on.exit({ if(verbose) message("closing ", is(con) ," output connection.") close(con) }) } apply_by_pages(x, stream_out_page, pagesize = pagesize, con = con, verbose = verbose, ...); } stream_out_page <- function(page, con, ...){ # useBytes can sometimes prevent recoding of utf8 to latin1 on windows. # on windows there is a bug when useBytes is used with a (non binary) text connection. 
writeLines(enc2utf8(asJSON(page, collapse = FALSE, ...)), con = con, useBytes = TRUE) } jsonlite/R/deparse_vector.R0000644000176200001440000000140112540777273015444 0ustar liggesusers#' @useDynLib jsonlite C_escape_chars deparse_vector_c <- function(x) { .Call(C_escape_chars, x) } deparse_vector_r <- function(x) { stopifnot(is.character(x)) if(!length(x)) return(x) x <- gsub("\\", "\\\\", x, fixed=TRUE) x <- gsub("\"", "\\\"", x, fixed=TRUE) x <- gsub("\n", "\\n", x, fixed=TRUE) x <- gsub("\r", "\\r", x, fixed=TRUE) x <- gsub("\t", "\\t", x, fixed=TRUE) x <- gsub("\b", "\\b", x, fixed=TRUE) x <- gsub("\f", "\\f", x, fixed=TRUE) paste0("\"", x, "\"") } # Which implementation to use deparse_vector <- deparse_vector_c #Below are older implementations of the same function deparse_vector_old <- function(x) { stopifnot(is.character(x)) x <- gsub("[\v\a]", "", x) vapply(x, deparse, character(1), USE.NAMES=FALSE) } jsonlite/R/collapse.R0000644000176200001440000000076612540777273014256 0ustar liggesusers#' @useDynLib jsonlite C_collapse_array C_collapse_array_pretty_inner C_collapse_array_pretty_outer collapse <- function(x, inner = TRUE, indent = 0L) { if(is.na(indent)){ .Call(C_collapse_array, x) } else if(isTRUE(inner)){ .Call(C_collapse_array_pretty_inner, x, indent) } else { .Call(C_collapse_array_pretty_outer, x, indent) } } #' @useDynLib jsonlite C_row_collapse_array row_collapse <- function(m, indent = NA_integer_){ .Call(C_row_collapse_array, m, indent = indent) } jsonlite/R/asJSON.character.R0000644000176200001440000000210112547737675015476 0ustar liggesuserssetMethod("asJSON", "character", function(x, collapse = TRUE, na = c("null", "string", "NA"), auto_unbox = FALSE, keep_vec_names = FALSE, indent = NA_integer_, ...) 
{ # shiny legacy exception if(isTRUE(keep_vec_names) && length(names(x))){ warn_keep_vec_names() return(asJSON(as.list(x), na = na, auto_unbox = TRUE, collapse = collapse, ...)) } # vectorized escaping tmp <- deparse_vector(x) # this was used with deparse_vector_old #if(identical(Encoding(x), "UTF-8")){ # if(!grepl("UTF", Sys.getlocale("LC_CTYPE"), ignore.case=TRUE)){ # tmp <- utf8conv(tmp); # } #} # validate NA if (any(missings <- which(is.na(x)))) { na <- match.arg(na) if (na %in% c("null")) { tmp[missings] <- "null" } else if(na %in% "string") { tmp[missings] <- "\"NA\"" } else { tmp[missings] <- NA_character_ } } if(isTRUE(auto_unbox) && length(tmp) == 1){ return(tmp); } # this is almost always true, except for class 'scalar' if (isTRUE(collapse)) { collapse(tmp, indent = indent) } else { tmp } }) jsonlite/R/asJSON.factor.R0000644000176200001440000000066312540777273015022 0ustar liggesuserssetMethod("asJSON", "factor", function(x, factor = c("string", "integer"), keep_vec_names = FALSE, ...) { # validate factor <- match.arg(factor) # dispatch if (factor == "integer") { # encode factor as enum asJSON(unclass(x), ...) } else { # encode as strings xc <- as.character(x) if(isTRUE(keep_vec_names)){ names(xc) <- names(x) } asJSON(xc, keep_vec_names = keep_vec_names, ...) 
} }) jsonlite/R/collapse_object.R0000644000176200001440000000062112540777273015572 0ustar liggesusers#' @useDynLib jsonlite C_collapse_object C_collapse_object_pretty collapse_object <- function(x, y, indent = 0L) { if(is.na(indent)){ .Call(C_collapse_object, x, y) } else { .Call(C_collapse_object_pretty, x, y, indent) } } #' @useDynLib jsonlite C_row_collapse_object row_collapse_object <- function(x, m, indent = NA_integer_){ .Call(C_row_collapse_object, x, m, indent = indent) } jsonlite/R/asJSON.list.R0000644000176200001440000000274312540777273014520 0ustar liggesuserssetMethod("asJSON", "list", function(x, collapse = TRUE, na = NULL, oldna = NULL, is_df = FALSE, auto_unbox = FALSE, indent = NA_integer_, ...) { # reset na arg when called from data frame if(identical(na, "NA")){ na <- oldna; } # coerse pairlist if needed if (is.pairlist(x)) { x <- as.vector(x, mode = "list") } # empty vector #if (!length(x)) { # if(collapse) { # return(if (is.null(names(x))) "[]" else "{}") # } else { # return(character()) # } #} # this condition appears when a dataframe contains a column with lists we need to # do this, because the [ operator always returns a list of length 1 # if (length(x) == 1 && is.null(names(x)) && collapse == FALSE) { # return(asJSON(x[[1]], ...)) # } # note we are NOT passing on the container argument. tmp <- if(is_df && auto_unbox){ vapply(x, function(y, ...) { asJSON(y, auto_unbox = is.list(y), ...) }, character(1), na = na, indent = indent + 2L, ...) } else { vapply(x, asJSON, character(1), na = na, auto_unbox = auto_unbox, indent = indent + 2L, ...) 
} if (!is.null(names(x))) { if(!collapse){ #this should never happen warning("collapse=FALSE called for named list.") } #in case of named list: objnames <- deparse_vector(cleannames(names(x))) collapse_object(objnames, tmp, indent) } else { #in case of unnamed list: if(collapse){ collapse(tmp, inner = FALSE, indent) } else { tmp } } }) jsonlite/R/unbox.R0000644000176200001440000000362312540777273013602 0ustar liggesusers#' Unbox a vector or data frame #' #' This function marks an atomic vector or data frame as a #' \href{http://en.wikipedia.org/wiki/Singleton_(mathematics)}{singleton}, i.e. #' a set with exactly 1 element. Thereby, the value will not turn into an #' \code{array} when encoded into JSON. This can only be done for #' atomic vectors of length 1, or data frames with exactly 1 row. To automatically #' unbox all vectors of length 1 within an object, use the \code{auto_unbox} argument #'in \code{\link{toJSON}}. #' #' It is usually recommended to avoid this function and stick with the default #' encoding schema for the various \R{} classes. The only use case for this function #' is if you are bound to some specific predefined JSON structure (e.g. to #' submit to an API), which has no natural \R{} representation. Note that the default #' encoding for data frames naturally results in a collection of key-value pairs, #' without using \code{unbox}. #' #' @param x atomic vector of length 1, or data frame with 1 row. #' @return Returns a singleton version of \code{x}. 
#' @export #' @references \url{http://en.wikipedia.org/wiki/Singleton_(mathematics)} #' @examples toJSON(list(foo=123)) #' toJSON(list(foo=unbox(123))) #' #' # Auto unbox vectors of length one: #' x = list(x=1:3, y = 4, z = "foo", k = NULL) #' toJSON(x) #' toJSON(x, auto_unbox = TRUE) #' #' x <- iris[1,] #' toJSON(list(rec=x)) #' toJSON(list(rec=unbox(x))) unbox <- function(x){ if(is.null(x)){ return(x) } if(is.data.frame(x)){ if(nrow(x) == 1){ return(as.scalar(x)) } else { stop("Tried to unbox dataframe with ", nrow(x), " rows.") } } if(!is.vector(unclass(x)) || !is.atomic(x) || length(dim(x)) > 1){ stop("Only atomic vectors of length 1 or data frames with 1 row can be unboxed.") } if(identical(length(x), 1L)){ return(as.scalar(x)) } else { stop("Tried to unbox a vector of length ", length(x)) } } jsonlite/R/asJSON.Date.R0000644000176200001440000000053512552140466014406 0ustar liggesuserssetMethod("asJSON", "Date", function(x, Date = c("ISO8601", "epoch"), ...) { # Validate argument Date <- match.arg(Date) # select a schema output <- switch(Date, ISO8601 = as.character(x), epoch = unclass(x), default = stop("Invalid argument for 'Date':", Date) ) # Dispatch to character encoding asJSON(output, ...) }) jsonlite/R/validate.R0000644000176200001440000000106112540777273014232 0ustar liggesusers#' Validate JSON #' #' Test if a string contains valid JSON. Characters vectors will be collapsed into a single string. 
#' #' @param txt JSON string #' @export #' @useDynLib jsonlite R_validate #' @examples #Output from toJSON and serializeJSON should pass validation #' myjson <- toJSON(mtcars) #' validate(myjson) #TRUE #' #' #Something bad happened #' truncated <- substring(myjson, 1, 100) #' validate(truncated) #FALSE validate <- function(txt) { stopifnot(is.character(txt)) txt <- paste(txt, collapse = "\n") .Call(R_validate, as.character(txt)) } jsonlite/R/asJSON.data.frame.R0000644000176200001440000000707012540777273015545 0ustar liggesuserssetMethod("asJSON", "data.frame", function(x, na = c("NA", "null", "string"), collapse = TRUE, dataframe = c("rows", "columns", "values"), complex = "string", oldna = NULL, rownames = NULL, keep_vec_names = FALSE, indent = NA_integer_, ...) { # Coerse pairlist if needed if (is.pairlist(x)) { x <- as.vector(x, mode = "list") } # Validate some args dataframe <- match.arg(dataframe) has_names <- identical(length(names(x)), ncol(x)) # Default to adding row names only if they are strings and not just stringified numbers if(isTRUE(rownames) || (is.null(rownames) && is.character(attr(x, "row.names")) && !all(grepl("^\\d+$", row.names(x))))){ # we don't use row.names() because this converts numbers to strings, # which will break sorting if(has_names){ x[["_row"]] <- attr(x, "row.names") } } # Unname named lists columns. These are very rare. namedlistvars <- which(vapply(x, is.namedlistnotdf, logical(1))) for (i in namedlistvars) { x[[i]] <- unname(x[[i]]) } # Convert POSIXlt to POSIXct before we start messing with lists posvars <- which(vapply(x, is, logical(1), "POSIXlt")) for (i in posvars) { x[[i]] <- as.POSIXct(x[[i]]) } # Column based is same as list. Do not pass collapse arg because it is a named list. if (dataframe == "columns") { return(asJSON(as.list(x), is_df = TRUE, na = na, dataframe = dataframe, complex = complex, rownames = rownames, indent = indent, ...)) } # Determine "oldna". 
This is needed when the data frame contains a list column if(missing(na) || !length(na) || identical(na, "NA")){ oldna <- NULL } else { oldna <- na; } # Set default for row based, don't do it earlier because it will affect 'oldna' or dataframe="columns" if(dataframe == "rows" && has_names){ na <- match.arg(na) } # no records if (!nrow(x)) { return(asJSON(list(), collapse=collapse, indent=indent)) } # Convert raw vectors rawvars <- which(vapply(x, is.raw, logical(1))) for (i in rawvars) { x[[i]] <- as.character.hexmode(x[[i]]) } # Turn complex vectors into data frames if(complex == "list"){ complxvars <- which(vapply(x, is.complex, logical(1))) for (i in complxvars) { x[[i]] <- data.frame(real=Re(x[[i]]), imaginary=Im(x[[i]])) } } #create a matrix of json elements dfnames <- deparse_vector(cleannames(names(x))) out <- vapply(x, asJSON, character(nrow(x)), collapse=FALSE, complex = complex, na = na, oldna = oldna, rownames = rownames, dataframe = dataframe, indent = indent + 2L, ..., USE.NAMES = FALSE) # This would be another way of doing the missing values # This does not require the individual classes to support na="NA" #if(identical(na, "NA")){ # namatrix <- vapply(x, is.na, logical(nrow(x))) # out[namatrix] <- NA; #} #this is a workaround for vapply simplifying into a vector for n=1 (not for n=0 surprisingly) if(!is.matrix(out)){ out <- t(out) } # turn the matrix into json records # note: special row_collapse functions because apply is slow! 
tmp <- if(dataframe == "rows" && (length(dfnames) == ncol(out))) { #apply(out, 1, collapse_object, x = dfnames, indent = indent + 2L); row_collapse_object(dfnames, out, indent = indent + 2L) } else { # for dataframe = "values" #apply(out, 1, collapse, indent = indent); row_collapse(out, indent = indent) } #collapse if(isTRUE(collapse)){ collapse(tmp, inner = FALSE, indent = indent) } else { tmp } }) is.namedlistnotdf <- function(x){ isTRUE(is.list(x) && !is.data.frame(x) && !is.null(names(x))) } jsonlite/R/push_parser.R0000644000176200001440000000132312557106037014765 0ustar liggesusers# Default to 100kb chunks. parse_con <- function(con, n , bigint_as_char){ stopifnot(is(con, "connection")) if(!isOpen(con)){ open(con, "rb") on.exit(close(con)) } feed_push_parser(readBin(con, raw(), n), reset = TRUE) while(length(buf <- readBin(con, raw(), n))) { feed_push_parser(buf) } finalize_push_parser(bigint_as_char) } #' @useDynLib jsonlite R_feed_push_parser feed_push_parser <- function(data, reset = FALSE){ if(is.character(data)){ data <- charToRaw(data) } stopifnot(is.raw(data)) .Call(R_feed_push_parser, data, reset) } #' @useDynLib jsonlite R_finalize_push_parser finalize_push_parser <- function(bigint_as_char){ .Call(R_finalize_push_parser, bigint_as_char) } jsonlite/R/asJSON.ANY.R0000644000176200001440000000201612540777273014165 0ustar liggesusers#' @import methods setMethod("asJSON", "ANY", function(x, force = FALSE, ...) { if (isS4(x) && !is(x, "classRepresentation")) { if (isTRUE(force)) { return(asJSON(attributes(x), force = force, ...)) } else { stop("No method for S4 class:", class(x)) } } else if (length(class(x)) > 1) { # If an object has multiple classes, we recursively try the next class. 
This is # S3 style dispatching that doesn't work by default for formal method definitions # There should be a more native way to accomplish this return(asJSON(structure(x, class = class(x)[-1]), force = force, ...)) } else if (isTRUE(force) && existsMethod("asJSON", class(unclass(x)))) { # As a last resort we can force encoding using the unclassed object return(asJSON(unclass(x), force = force, ...)) } else if (isTRUE(force)) { return(asJSON(NULL)) warning("No method asJSON S3 class: ", class(x)) } else { # If even that doesn't work, we give up. stop("No method asJSON S3 class: ", class(x)) } }) jsonlite/vignettes/0000755000176200001440000000000012626133701014112 5ustar liggesusersjsonlite/vignettes/json-apis.Rmd.orig0000644000176200001440000001452312573053647017440 0ustar liggesusers--- title: "Fetching JSON data from REST APIs" date: "`r Sys.Date()`" output: html_document vignette: > %\VignetteIndexEntry{Fetching JSON data from REST APIs} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r echo=FALSE} library(knitr) opts_chunk$set(comment="") #this replaces tabs by spaces because latex-verbatim doesn't like tabs #no longer needed with yajl #toJSON <- function(...){ # gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); #} ``` This section lists some examples of public HTTP APIs that publish data in JSON format. These are great to get a sense of the complex structures that are encountered in real world JSON data. All services are free, but some require registration/authentication. Each example returns lots of data, therefore not all output is printed in this document. ```{r message=FALSE} library(jsonlite) ``` ## Github Github is an online code repository and has APIs to get live data on almost all activity. 
Below some examples from a well known R package and author: ```{r} hadley_orgs <- fromJSON("https://api.github.com/users/hadley/orgs") hadley_repos <- fromJSON("https://api.github.com/users/hadley/repos") gg_commits <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits") gg_issues <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues") #latest issues paste(format(gg_issues$user$login), ":", gg_issues$title) ``` ## CitiBike NYC A single public API that shows location, status and current availability for all stations in the New York City bike sharing imitative. ```{r} citibike <- fromJSON("http://citibikenyc.com/stations/json") stations <- citibike$stationBeanList colnames(stations) nrow(stations) ``` ## Ergast The Ergast Developer API is an experimental web service which provides a historical record of motor racing data for non-commercial purposes. ```{r} res <- fromJSON('http://ergast.com/api/f1/2004/1/results.json') drivers <- res$MRData$RaceTable$Races$Results[[1]]$Driver colnames(drivers) drivers[1:10, c("givenName", "familyName", "code", "nationality")] ``` ## ProPublica Below an example from the [ProPublica Nonprofit Explorer API](http://projects.propublica.org/nonprofits/api) where we retrieve the first 10 pages of tax-exempt organizations in the USA, ordered by revenue. The `rbind.pages` function is used to combine the pages into a single data frame. ```{r, message=FALSE} #store all pages in a list first baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" pages <- list() for(i in 0:10){ mydata <- fromJSON(paste0(baseurl, "&page=", i), flatten=TRUE) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } #combine all into one filings <- rbind.pages(pages) #check output nrow(filings) filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")] ``` ## New York Times The New York Times has several APIs as part of the NYT developer network. 
These interface to data from various departments, such as news articles, book reviews, real estate, etc. Registration is required (but free) and a key can be obtained at [here](http://developer.nytimes.com/docs/reference/keys). The code below includes some example keys for illustration purposes. ```{r} #search for articles article_key <- "&api-key=c2fede7bd9aea57c898f538e5ec0a1ee:6:68700045" url <- "http://api.nytimes.com/svc/search/v2/articlesearch.json?q=obamacare+socialism" req <- fromJSON(paste0(url, article_key)) articles <- req$response$docs colnames(articles) #search for best sellers bestseller_key <- "&api-key=5e260a86a6301f55546c83a47d139b0d:3:68700045" url <- "http://api.nytimes.com/svc/books/v2/lists/overview.json?published_date=2013-01-01" req <- fromJSON(paste0(url, bestseller_key)) bestsellers <- req$results$list category1 <- bestsellers[[1, "books"]] subset(category1, select = c("author", "title", "publisher")) #movie reviews movie_key <- "&api-key=5a3daaeee6bbc6b9df16284bc575e5ba:0:68700045" url <- "http://api.nytimes.com/svc/movies/v2/reviews/dvd-picks.json?order=by-date" req <- fromJSON(paste0(url, movie_key)) reviews <- req$results colnames(reviews) reviews[1:5, c("display_title", "byline", "mpaa_rating")] ``` ## CrunchBase CrunchBase is the free database of technology companies, people, and investors that anyone can edit. ```{r eval=FALSE} key <- "f6dv6cas5vw7arn5b9d7mdm3" res <- fromJSON(paste0("http://api.crunchbase.com/v/1/search.js?query=R&api_key=", key)) head(res$results) ``` ## Sunlight Foundation The Sunlight Foundation is a non-profit that helps to make government transparent and accountable through data, tools, policy and journalism. Register a free key at [here](http://sunlightfoundation.com/api/accounts/register/). An example key is provided. 
```{r} key <- "&apikey=39c83d5a4acc42be993ee637e2e4ba3d" #Find bills about drones drone_bills <- fromJSON(paste0("http://openstates.org/api/v1/bills/?q=drone", key)) drone_bills$title <- substring(drone_bills$title, 1, 40) print(drone_bills[1:5, c("title", "state", "chamber", "type")]) #Congress mentioning "constitution" res <- fromJSON(paste0("http://capitolwords.org/api/1/dates.json?phrase=immigration", key)) wordcount <- res$results wordcount$day <- as.Date(wordcount$day) summary(wordcount) #Local legislators legislators <- fromJSON(paste0("http://congress.api.sunlightfoundation.com/", "legislators/locate?latitude=42.96&longitude=-108.09", key)) subset(legislators$results, select=c("last_name", "chamber", "term_start", "twitter_id")) ``` ## Twitter The twitter API requires OAuth2 authentication. Some example code: ```{r} #Create your own appication key at https://dev.twitter.com/apps consumer_key = "EZRy5JzOH2QQmVAe9B4j2w"; consumer_secret = "OIDC4MdfZJ82nbwpZfoUO4WOLTYjoRhpHRAWj6JMec"; #Use basic auth library(httr) secret <- RCurl::base64(paste(consumer_key, consumer_secret, sep = ":")); req <- POST("https://api.twitter.com/oauth2/token", add_headers( "Authorization" = paste("Basic", secret), "Content-Type" = "application/x-www-form-urlencoded;charset=UTF-8" ), body = "grant_type=client_credentials" ); #Extract the access token token <- paste("Bearer", content(req)$access_token) #Actual API call url <- "https://api.twitter.com/1.1/statuses/user_timeline.json?count=10&screen_name=Rbloggers" req <- GET(url, add_headers(Authorization = token)) json <- content(req, as = "text") tweets <- fromJSON(json) substring(tweets$text, 1, 100) ``` jsonlite/vignettes/json-aaquickstart.Rmd0000644000176200001440000001017712613002655020226 0ustar liggesusers--- Title: "Getting started with JSON and jsonlite" date: "`r Sys.Date()`" output: html_document vignette: > %\VignetteIndexEntry{Getting started with JSON and jsonlite} %\VignetteEngine{knitr::rmarkdown} 
\usepackage[utf8]{inputenc} --- ```{r echo=FALSE} library(knitr) opts_chunk$set(comment="") #this replaces tabs by spaces because latex-verbatim doesn't like tabs #no longer needed because yajl does not use tabs. #toJSON <- function(...){ # gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); #} ``` # Getting started with JSON and jsonlite The jsonlite package is a JSON parser/generator optimized for the web. Its main strength is that it implements a bidirectional mapping between JSON data and the most important R data types. Thereby we can convert between R objects and JSON without loss of type or information, and without the need for any manual data munging. This is ideal for interacting with web APIs, or to build pipelines where data structures seamlessly flow in and out of R using JSON. ```{r message=FALSE} library(jsonlite) all.equal(mtcars, fromJSON(toJSON(mtcars))) ``` This vignette introduces basic concepts to get started with jsonlite. For a more detailed outline and motivation of the mapping, see: [arXiv:1403.2805](http://arxiv.org/abs/1403.2805). ## Simplification Simplification is the process where JSON arrays automatically get converted from a list into a more specific R class. The `fromJSON` function has 3 arguments which control the simplification process: `simplifyVector`, `simplifyDataFrame` and `simplifyMatrix`. Each one is enabled by default. 
| JSON structure | Example JSON data | Simplifies to R class | Argument in fromJSON | | ----------------------|----------------------------------------------------------|-----------------------|----------------------| | Array of primitives | `["Amsterdam", "Rotterdam", "Utrecht", "Den Haag"]` | Atomic Vector | simplifyVector | | Array of objects | `[{"name":"Erik", "age":43}, {"name":"Anna", "age":32}]` | Data Frame | simplifyDataFrame | | Array of arrays | `[ [1, 2, 3], [4, 5, 6] ]` | Matrix | simplifyMatrix | ### Atomic Vectors When `simplifyVector` is enabled, JSON arrays containing **primitives** (strings, numbers, booleans or null) simplify into an atomic vector: ```{r} # A JSON array of primitives json <- '["Mario", "Peach", null, "Bowser"]' # Simplifies into an atomic vector fromJSON(json) ``` Without simplification, any JSON array turns into a list: ```{r} # No simplification: fromJSON(json, simplifyVector = FALSE) ``` ### Data Frames When `simplifyDataFrame` is enabled, JSON arrays containing **objects** (key-value pairs) simplify into a data frame: ```{r} json <- '[ {"Name" : "Mario", "Age" : 32, "Occupation" : "Plumber"}, {"Name" : "Peach", "Age" : 21, "Occupation" : "Princess"}, {}, {"Name" : "Bowser", "Occupation" : "Koopa"} ]' mydf <- fromJSON(json) mydf ``` The data frame gets converted back into the original JSON structure by `toJSON` (whitespace and line breaks are ignorable in JSON). ```{r} mydf$Ranking <- c(3, 1, 2, 4) toJSON(mydf, pretty=TRUE) ``` Hence you can go back and forth between dataframes and JSON, without any manual data restructuring. 
### Matrices and Arrays When `simplifyMatrix` is enabled, JSON arrays containing **equal-length sub-arrays** simplify into a matrix (or higher order R array): ```{r} json <- '[ [1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12] ]' mymatrix <- fromJSON(json) mymatrix ``` Again, we can use `toJSON` to convert the matrix or array back into the original JSON structure: ```{r} toJSON(mymatrix, pretty = TRUE) ``` The simplification works for arrays of arbitrary dimensionality, as long as the dimensions match (R does not support ragged arrays). ```{r} json <- '[ [[1, 2], [3, 4]], [[5, 6], [7, 8]], [[9, 10], [11, 12]] ]' myarray <- fromJSON(json) myarray[1, , ] myarray[ , ,1] ``` This is all there is to it! For a more detailed outline and motivation of the mapping, see: [arXiv:1403.2805](http://arxiv.org/abs/1403.2805). jsonlite/vignettes/json-apis.Rmd0000644000176200001440000003534212573053666016504 0ustar liggesusers--- title: "Fetching JSON data from REST APIs" date: "2015-09-06" output: html_document vignette: > %\VignetteIndexEntry{Fetching JSON data from REST APIs} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- This section lists some examples of public HTTP APIs that publish data in JSON format. These are great to get a sense of the complex structures that are encountered in real world JSON data. All services are free, but some require registration/authentication. Each example returns lots of data, therefore not all output is printed in this document. ```r library(jsonlite) ``` ## Github Github is an online code repository and has APIs to get live data on almost all activity. 
Below some examples from a well known R package and author: ```r hadley_orgs <- fromJSON("https://api.github.com/users/hadley/orgs") hadley_repos <- fromJSON("https://api.github.com/users/hadley/repos") gg_commits <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits") gg_issues <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues") #latest issues paste(format(gg_issues$user$login), ":", gg_issues$title) ``` ``` [1] "idavydov : annotate(\"segment\") wrong position if limits are inverted" [2] "ben519 : geom_polygon doesn't make NA values grey when using continuous fill" [3] "has2k1 : Fix multiple tiny issues in the position classes" [4] "neggert : Problem with geom_bar position=fill and faceting" [5] "robertzk : Fix typo in geom_linerange docs." [6] "lionel- : stat_bar() gets confused with numeric discrete data?" [7] "daattali : Request: support theme axis.ticks.length.x and axis.ticks.length.y" [8] "sethchandler : Documentation error on %+replace% ?" [9] "daattali : dev version 1.0.1.9003 has some breaking changes" [10] "lionel- : Labels" [11] "nutterb : legend for `geom_line` colour disappears when `alpha` < 1.0" [12] "wch : scale_name property should be removed from Scale objects" [13] "wch : scale_details arguments in Coords should be renamed panel_scales or scale" [14] "wch : ScalesList-related functions should be moved into ggproto object" [15] "wch : update_geom_defaults and update_stat_defaults should accept Geom and Stat objects" [16] "wch : Make some ggproto objects immutable. 
Closes #1237" [17] "and3k : Control size of the border and padding of geom_label" [18] "hadley : Consistent argument order and formatting for layer functions" [19] "hadley : Consistently handle missing values" [20] "cmohamma : fortify causes fatal error" [21] "lionel- : Flawed `label_bquote()` implementation" [22] "beroe : Create alias for `colors=` in `scale_color_gradientn()`" [23] "and3k : hjust broken in y facets" [24] "joranE : Allow color bar guides for alpha scales" [25] "hadley : dir = \"v\" also needs to swap nrow and ncol" [26] "joranE : Add examples for removing guides" [27] "lionel- : New approach for horizontal layers" [28] "bbolker : add horizontal linerange geom" [29] "hadley : Write vignette about grid" [30] "hadley : Immutable flag for ggproto objects" ``` ## CitiBike NYC A single public API that shows location, status and current availability for all stations in the New York City bike sharing imitative. ```r citibike <- fromJSON("http://citibikenyc.com/stations/json") stations <- citibike$stationBeanList colnames(stations) ``` ``` [1] "id" "stationName" [3] "availableDocks" "totalDocks" [5] "latitude" "longitude" [7] "statusValue" "statusKey" [9] "availableBikes" "stAddress1" [11] "stAddress2" "city" [13] "postalCode" "location" [15] "altitude" "testStation" [17] "lastCommunicationTime" "landMark" ``` ```r nrow(stations) ``` ``` [1] 509 ``` ## Ergast The Ergast Developer API is an experimental web service which provides a historical record of motor racing data for non-commercial purposes. 
```r res <- fromJSON('http://ergast.com/api/f1/2004/1/results.json') drivers <- res$MRData$RaceTable$Races$Results[[1]]$Driver colnames(drivers) ``` ``` [1] "driverId" "code" "url" "givenName" [5] "familyName" "dateOfBirth" "nationality" "permanentNumber" ``` ```r drivers[1:10, c("givenName", "familyName", "code", "nationality")] ``` ``` givenName familyName code nationality 1 Michael Schumacher MSC German 2 Rubens Barrichello BAR Brazilian 3 Fernando Alonso ALO Spanish 4 Ralf Schumacher SCH German 5 Juan Pablo Montoya MON Colombian 6 Jenson Button BUT British 7 Jarno Trulli TRU Italian 8 David Coulthard COU British 9 Takuma Sato SAT Japanese 10 Giancarlo Fisichella FIS Italian ``` ## ProPublica Below an example from the [ProPublica Nonprofit Explorer API](http://projects.propublica.org/nonprofits/api) where we retrieve the first 10 pages of tax-exempt organizations in the USA, ordered by revenue. The `rbind.pages` function is used to combine the pages into a single data frame. ```r #store all pages in a list first baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" pages <- list() for(i in 0:10){ mydata <- fromJSON(paste0(baseurl, "&page=", i), flatten=TRUE) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } #combine all into one filings <- rbind.pages(pages) #check output nrow(filings) ``` ``` [1] 275 ``` ```r filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")] ``` ``` organization.sub_name organization.city 1 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 2 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 3 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 4 DAVIDSON COUNTY COMMUNITY COLLEGE FOUNDATION INC LEXINGTON 5 KAISER FOUNDATION HOSPITALS OAKLAND 6 KAISER FOUNDATION HOSPITALS OAKLAND 7 KAISER FOUNDATION HOSPITALS OAKLAND 8 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 9 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 10 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN totrevenue 1 42346486950 2 
40148558254 3 37786011714 4 30821445312 5 20013171194 6 18543043972 7 17980030355 8 10619215354 9 10452560305 10 9636630380 ``` ## New York Times The New York Times has several APIs as part of the NYT developer network. These interface to data from various departments, such as news articles, book reviews, real estate, etc. Registration is required (but free) and a key can be obtained at [here](http://developer.nytimes.com/docs/reference/keys). The code below includes some example keys for illustration purposes. ```r #search for articles article_key <- "&api-key=c2fede7bd9aea57c898f538e5ec0a1ee:6:68700045" url <- "http://api.nytimes.com/svc/search/v2/articlesearch.json?q=obamacare+socialism" req <- fromJSON(paste0(url, article_key)) articles <- req$response$docs colnames(articles) ``` ``` [1] "web_url" "snippet" "lead_paragraph" [4] "abstract" "print_page" "blog" [7] "source" "multimedia" "headline" [10] "keywords" "pub_date" "document_type" [13] "news_desk" "section_name" "subsection_name" [16] "byline" "type_of_material" "_id" [19] "word_count" ``` ```r #search for best sellers bestseller_key <- "&api-key=5e260a86a6301f55546c83a47d139b0d:3:68700045" url <- "http://api.nytimes.com/svc/books/v2/lists/overview.json?published_date=2013-01-01" req <- fromJSON(paste0(url, bestseller_key)) bestsellers <- req$results$list category1 <- bestsellers[[1, "books"]] subset(category1, select = c("author", "title", "publisher")) ``` ``` author title publisher 1 Gillian Flynn GONE GIRL Crown Publishing 2 John Grisham THE RACKETEER Knopf Doubleday Publishing 3 E L James FIFTY SHADES OF GREY Knopf Doubleday Publishing 4 Nicholas Sparks SAFE HAVEN Grand Central Publishing 5 David Baldacci THE FORGOTTEN Grand Central Publishing ``` ```r #movie reviews movie_key <- "&api-key=5a3daaeee6bbc6b9df16284bc575e5ba:0:68700045" url <- "http://api.nytimes.com/svc/movies/v2/reviews/dvd-picks.json?order=by-date" req <- fromJSON(paste0(url, movie_key)) reviews <- req$results colnames(reviews) ``` 
``` [1] "nyt_movie_id" "display_title" "sort_name" [4] "mpaa_rating" "critics_pick" "thousand_best" [7] "byline" "headline" "capsule_review" [10] "summary_short" "publication_date" "opening_date" [13] "dvd_release_date" "date_updated" "seo_name" [16] "link" "related_urls" "multimedia" ``` ```r reviews[1:5, c("display_title", "byline", "mpaa_rating")] ``` ``` display_title byline mpaa_rating 1 Tom at the Farm Stephen Holden NR 2 A Little Chaos Stephen Holden R 3 Big Game Andy Webster PG13 4 Balls Out Andy Webster R 5 Mad Max: Fury Road A. O. Scott R ``` ## CrunchBase CrunchBase is the free database of technology companies, people, and investors that anyone can edit. ```r key <- "f6dv6cas5vw7arn5b9d7mdm3" res <- fromJSON(paste0("http://api.crunchbase.com/v/1/search.js?query=R&api_key=", key)) head(res$results) ``` ## Sunlight Foundation The Sunlight Foundation is a non-profit that helps to make government transparent and accountable through data, tools, policy and journalism. Register a free key at [here](http://sunlightfoundation.com/api/accounts/register/). An example key is provided. ```r key <- "&apikey=39c83d5a4acc42be993ee637e2e4ba3d" #Find bills about drones drone_bills <- fromJSON(paste0("http://openstates.org/api/v1/bills/?q=drone", key)) drone_bills$title <- substring(drone_bills$title, 1, 40) print(drone_bills[1:5, c("title", "state", "chamber", "type")]) ``` ``` title state chamber type 1 WILDLIFE-TECH il lower bill 2 Criminalizes the unlawful use of an unma ny lower bill 3 Criminalizes the unlawful use of an unma ny lower bill 4 Relating to: criminal procedure and prov wi lower bill 5 Relating to: criminal procedure and prov wi upper bill ``` ```r #Congress mentioning "constitution" res <- fromJSON(paste0("http://capitolwords.org/api/1/dates.json?phrase=immigration", key)) wordcount <- res$results wordcount$day <- as.Date(wordcount$day) summary(wordcount) ``` ``` count day raw_count Min. : 1.00 Min. :1996-01-02 Min. 
: 1.00 1st Qu.: 3.00 1st Qu.:2001-01-22 1st Qu.: 3.00 Median : 8.00 Median :2005-11-16 Median : 8.00 Mean : 25.27 Mean :2005-10-02 Mean : 25.27 3rd Qu.: 21.00 3rd Qu.:2010-05-12 3rd Qu.: 21.00 Max. :1835.00 Max. :2015-08-05 Max. :1835.00 ``` ```r #Local legislators legislators <- fromJSON(paste0("http://congress.api.sunlightfoundation.com/", "legislators/locate?latitude=42.96&longitude=-108.09", key)) subset(legislators$results, select=c("last_name", "chamber", "term_start", "twitter_id")) ``` ``` last_name chamber term_start twitter_id 1 Lummis house 2015-01-06 CynthiaLummis 2 Enzi senate 2015-01-06 SenatorEnzi 3 Barrasso senate 2013-01-03 SenJohnBarrasso ``` ## Twitter The twitter API requires OAuth2 authentication. Some example code: ```r #Create your own appication key at https://dev.twitter.com/apps consumer_key = "EZRy5JzOH2QQmVAe9B4j2w"; consumer_secret = "OIDC4MdfZJ82nbwpZfoUO4WOLTYjoRhpHRAWj6JMec"; #Use basic auth library(httr) secret <- RCurl::base64(paste(consumer_key, consumer_secret, sep = ":")); req <- POST("https://api.twitter.com/oauth2/token", add_headers( "Authorization" = paste("Basic", secret), "Content-Type" = "application/x-www-form-urlencoded;charset=UTF-8" ), body = "grant_type=client_credentials" ); #Extract the access token token <- paste("Bearer", content(req)$access_token) #Actual API call url <- "https://api.twitter.com/1.1/statuses/user_timeline.json?count=10&screen_name=Rbloggers" req <- GET(url, add_headers(Authorization = token)) json <- content(req, as = "text") tweets <- fromJSON(json) substring(tweets$text, 1, 100) ``` ``` [1] "Analysing longitudinal data: Multilevel growth models (II) http://t.co/unUxszG7VJ #rstats" [2] "RcppDE 0.1.4 http://t.co/3qPhFzoOpj #rstats" [3] "Minimalist Maps http://t.co/fpkNznuCoX #rstats" [4] "Tutorials freely available of course I taught: including ggplot2, dplyr and shiny http://t.co/WsxX4U" [5] "Deploying Shiny apps with shinyapps.io http://t.co/tjef1pbKLt #rstats" [6] "Bootstrap Evaluation of 
Clusters http://t.co/EbY7ziKCz5 #rstats" [7] "Add external code to Rmarkdown http://t.co/RCJEmS8gyP #rstats" [8] "Linear models with weighted observations http://t.co/pUoHpvxAGC #rstats" [9] "dplyr 0.4.3 http://t.co/ze3zc8t7qj #rstats" [10] "xkcd survey and the power to shape the internet http://t.co/vNaKhxWxE4 #rstats" ``` jsonlite/vignettes/json-opencpu.Rnw0000644000176200001440000000766412540777273017254 0ustar liggesusers%\VignetteEngine{knitr::knitr} %\VignetteIndexEntry{Simple JSON RPC with OpenCPU} %This is a template. %Actual text goes in sources/content.Rnw \documentclass{article} \author{Jeroen Ooms} %useful packages \usepackage{url} \usepackage{fullpage} \usepackage{xspace} \usepackage{hyperref} \usepackage{fancyvrb} %for table positioning \usepackage{float} \restylefloat{table} %support for accents \usepackage[utf8]{inputenc} %support for ascii art \usepackage{pmboxdraw} %use vspace instead of indentation for paragraphs \usepackage{parskip} %extra line spacing \usepackage{setspace} \setstretch{1.25} %knitr style verbatim blocks \newenvironment{codeblock}{ \VerbatimEnvironment \definecolor{shadecolor}{rgb}{0.95, 0.95, 0.95}\color{fgcolor} \color{black} \begin{kframe} \begin{BVerbatim} }{ \end{BVerbatim} \end{kframe} } %placeholders for JSS/RJournal \newcommand{\pkg}[1]{\texttt{#1}} \newcommand{\code}[1]{\texttt{#1}} \newcommand{\file}[1]{\texttt{#1}} \newcommand{\dfn}[1]{\emph{#1}} \newcommand{\proglang}[1]{\texttt{#1}} %shorthands \newcommand{\JSON}{\texttt{JSON}\xspace} \newcommand{\R}{\texttt{R}\xspace} \newcommand{\C}{\texttt{C}\xspace} \newcommand{\toJSON}{\texttt{toJSON}\xspace} \newcommand{\fromJSON}{\texttt{fromJSON}\xspace} \newcommand{\XML}{\pkg{XML}\xspace} \newcommand{\jsonlite}{\pkg{jsonlite}\xspace} \newcommand{\RJSONIO}{\pkg{RJSONIO}\xspace} \newcommand{\API}{\texttt{API}\xspace} \newcommand{\JavaScript}{\texttt{JavaScript}\xspace} %trick for using same content file as chatper and article \newcommand{\maintitle}[1]{ \title{#1} \maketitle } 
%actual document \begin{document} \section*{Simple \JSON RPC with OpenCPU} The \jsonlite package is used by \texttt{OpenCPU} to convert between \JSON data and \R objects. Thereby clients can retrieve \R objects, or remotely call \R functions using \JSON where the function arguments as well as function return value are \JSON objects. For example to download the \texttt{Boston} data from the \texttt{MASS} package:\\ \begin{tabular}{|l|l|} \hline \textbf{Command in R} & \textbf{Example URL on OpenCPU} \\ \hline \texttt{toJSON(Boston, digits=4)} & \url{https://demo.ocpu.io/MASS/data/Boston/json?digits=4} \\ \hline \texttt{toJSON(Boston, dataframe="col")} & \url{https://demo.ocpu.io/MASS/data/Boston/json?dataframe=col} \\ \hline \texttt{toJSON(Boston, pretty=FALSE)} & \url{https://demo.ocpu.io/MASS/data/Boston/json?pretty=false} \\ \hline \end{tabular} \newline To calculate the variance of some the numbers \texttt{1:9} in the command line using using \texttt{curl}: \begin{Verbatim}[frame=single] curl https://demo.ocpu.io/stats/R/var/json -d "x=[1,2,3,4,5,6,7,8,9]" \end{Verbatim} Or equivalently post the entire body in \JSON format: \begin{Verbatim}[frame=single] curl https://demo.ocpu.io/stats/R/var/json -H "Content-Type: application/json" \ -d "{\"x\":[1,2,3,4,5,6,7,8,9]}" \end{Verbatim} Below an example where we call the \texttt{melt} function from the \texttt{reshape2} package using some example rows from the \texttt{airquality} data. Here both input and output consist of a data frame. 
\begin{Verbatim}[frame=single] curl https://demo.ocpu.io/reshape2/R/melt/json -d 'id=["Month", "Day"]&data=[ { "Ozone" : 41, "Solar.R" : 190, "Wind" : 7.4, "Temp" : 67, "Month" : 5, "Day" : 1 }, { "Ozone" : 36, "Solar.R" : 118, "Wind" : 8, "Temp" : 72, "Month" : 5, "Day" : 2 } ]' \end{Verbatim} Or equivalently: \begin{Verbatim}[frame=single] curl https://demo.ocpu.io/reshape2/R/melt/json -H "Content-Type: application/json" \ -d '{"id" : ["Month", "Day"], "data" : [ { "Ozone" : 41, "Solar.R" : 190, "Wind" : 7.4, "Temp" : 67, "Month" : 5, "Day" : 1 }, { "Ozone" : 36, "Solar.R" : 118, "Wind" : 8, "Temp" : 72, "Month" : 5, "Day" : 2 } ] }' \end{Verbatim} This request basically executes the following \R code: <>= mydata <- airquality[1:2,] y <- reshape2::melt(data = mydata, id = c("Month", "Day")) toJSON(y) @ %end \end{document} jsonlite/vignettes/references.bib0000644000176200001440000001041312540777273016725 0ustar liggesusers@manual{jsonschema, title={{JSON Schema: Core Definitions and Terminology}}, organization={Internet Engineering Task Force (IETF)}, author={F. Galiegue and K. Zyp}, year={2013}, url={https://tools.ietf.org/html/draft-zyp-json-schema-04}, } @manual{msgpack, title={{MessagePack: It's Like JSON. But Fast and Small}}, author={Sadayuki Furuhashi}, year={2014}, url={http://msgpack.org/}, } @BOOK{chodorow2013mongodb, title={MongoDB: The Definitive Guide}, author={Kristina Chodorow}, publisher={O'Reilly Media}, year={2013}, month={5}, edition={Second}, isbn={9781449344689}, url={http://amazon.com/o/ASIN/1449344682/}, price={$39.99}, totalpages={432}, timestamp={2014.05.05}, } @misc{jsonkeys, title={First Steps in Data Visualisation Using \texttt{d3.js}}, author={Mike Dewar}, organization={bit.ly}, year={2012}, note={New York Open Statistical Programming Meetup on Jan. 12, 2012}, url={http://vimeo.com/35005701#t=7m17s} } @article{lawson1979basic, author = {Lawson, C. L. and Hanson, R. J. and Kincaid, D. R. and Krogh, F. 
T.}, title = {Basic Linear Algebra Subprograms for Fortran Usage}, journal = {ACM Transactions on Mathematical Software}, issue_date = {Sept. 1979}, volume = {5}, number = {3}, month = sep, year = {1979}, issn = {0098-3500}, pages = {308--323}, numpages = {16}, url = {http://doi.acm.org/10.1145/355841.355847}, doi = {10.1145/355841.355847}, acmid = {355847}, publisher = {ACM}, address = {New York, NY, USA}, } @BOOK{anderson1999lapack, title={LAPACK Users' Guide (Software, Environments and Tools)}, author={E. Anderson and Z. Bai and C. Bischof and S. Blackford and J. Demmel and J. Dongarra and J. Du Croz and A. Greenbaum and S. Hammarling and A. McKenney and D. Sorensen}, publisher={Society for Industrial and Applied Mathematics}, year={1987}, month={1}, edition={3}, isbn={9780898714470}, url={http://amazon.com/o/ASIN/0898714478/}, price={$65.00}, totalpages={429}, timestamp={2014.05.05}, } @Manual{R, title = {R: A Language and Environment for Statistical Computing}, author = {{R Core Team}}, organization = {R Foundation for Statistical Computing}, address = {Vienna, Austria}, year = {2014}, url = {http://www.R-project.org/}, } @Manual{RJSONIO, title = {{\pkg{RJSONIO}: Serialize \R Objects to \JSON, \JavaScript Object Notation}}, author = {Duncan {Temple Lang}}, year = {2013}, note = {\R package version 1.0-3}, url = {http://CRAN.R-project.org/package=RJSONIO}, } @Manual{rjson, title = {{\pkg{rjson}: \JSON for \R}}, author = {Alex Couture-Beil}, year = {2013}, note = {\R package version 0.2.13}, url = {http://CRAN.R-project.org/package=rjson}, } @Manual{jsonlite, title = {{\pkg{jsonlite}: A Smarter \JSON Encoder for \R}}, author = {Jeroen Ooms and Duncan Temple Lang and Jonathan Wallace}, note = {\R package version 0.9.8}, url = {http://github.com/jeroenooms/jsonlite#readme}, year = {2014} } @misc{crockford2006application, author="D. 
Crockford", title="{The \texttt{application/json} Media Type for \JavaScript Object Notation (\JSON)}", series="Request for Comments", number="4627", howpublished="RFC 4627 (Informational)", publisher="IETF", organization="Internet Engineering Task Force", year=2006, month=jul, note="Obsoleted by RFCs 7158, 7159", url="http://www.ietf.org/rfc/rfc4627.txt", } @article{ecma1999262, title={{\proglang{ECMAScript} Language Specification}}, author={{Ecma International}}, journal={{European Association for Standardizing Information and Communication Systems}}, year={1999}, url={http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf} } @article{tidydata, title={{Tidy Data}}, author={Wickham, Hadley}, journal={Under review}, year={2014}, url={http://vita.had.co.nz/papers/tidy-data.pdf} } @inproceedings{crockford2006json, title={{JSON: The Fat-free Alternative to XML}}, author={Crockford, Douglas}, booktitle={Proceedings of XML}, volume={2006}, year={2006}, url={http://www.json.org/fatfree.html} } @book{nolan2014xml, title={XML and Web Technologies for Data Sciences with \R}, author={Nolan, Deborah and Temple Lang, Duncan}, year={2014}, publisher={Springer-Verlag}, url={http://link.springer.com/book/10.1007/978-1-4614-7900-0} }jsonlite/vignettes/json-paging.Rmd.orig0000644000176200001440000000717112573051534017743 0ustar liggesusers--- title: "Combining pages of JSON data with jsonlite" date: "`r Sys.Date()`" output: html_document vignette: > %\VignetteIndexEntry{Combining pages of JSON data with jsonlite} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- ```{r echo=FALSE} library(knitr) opts_chunk$set(comment="") #this replaces tabs by spaces because latex-verbatim doesn't like tabs toJSON <- function(...){ gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); } ``` ```{r echo=FALSE, message=FALSE} library(jsonlite) ``` The [jsonlite](https://cran.r-project.org/package=jsonlite) package is a `JSON` parser/generator for R which is optimized 
for pipelines and web APIs. It is used by the OpenCPU system and many other packages to get data in and out of R using the `JSON` format. ## A bidirectional mapping One of the main strengths of `jsonlite` is that it implements a bidirectional [mapping](http://arxiv.org/abs/1403.2805) between JSON and data frames. Thereby it can convert nested collections of JSON records, as they often appear on the web, immediately into the appropriate R structure. For example to grab some data from ProPublica we can simply use: ```{r eval=FALSE} library(jsonlite) mydata <- fromJSON("https://projects.propublica.org/forensics/geos.json", flatten = TRUE) View(mydata) ``` The `mydata` object is a data frame which can be used directly for modeling or visualization, without the need for any further complicated data manipulation. ## Paging with jsonlite A question that comes up frequently is how to combine pages of data. Most web APIs limit the amount of data that can be retrieved per request. If the client needs more data than what can fits in a single request, it needs to break down the data into multiple requests that each retrieve a fragment (page) of data, not unlike pages in a book. In practice this is often implemented using a `page` parameter in the API. Below an example from the [ProPublica Nonprofit Explorer API](http://projects.propublica.org/nonprofits/api) where we retrieve the first 3 pages of tax-exempt organizations in the USA, ordered by revenue: ```{r} baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" mydata0 <- fromJSON(paste0(baseurl, "&page=0"), flatten = TRUE) mydata1 <- fromJSON(paste0(baseurl, "&page=1"), flatten = TRUE) mydata2 <- fromJSON(paste0(baseurl, "&page=2"), flatten = TRUE) #The actual data is in the filings element mydata0$filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")] ``` To analyze or visualize these data, we need to combine the pages into a single dataset. 
We can do this with the `rbind.pages` function. Note that in this example, the actual data is contained by the `filings` field: ```{r} #Rows per data frame nrow(mydata0$filings) #Combine data frames filings <- rbind.pages( list(mydata0$filings, mydata1$filings, mydata2$filings) ) #Total number of rows nrow(filings) ``` ## Automatically combining many pages We can write a simple loop that automatically downloads and combines many pages. For example to retrieve the first 20 pages with non-profits from the example above: ```{r, message=FALSE} #store all pages in a list first baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" pages <- list() for(i in 0:20){ mydata <- fromJSON(paste0(baseurl, "&page=", i)) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } #combine all into one filings <- rbind.pages(pages) #check output nrow(filings) colnames(filings) ``` From here, we can go straight to analyzing the filings data without any further tedious data manipulation. jsonlite/vignettes/json-mapping.pdf.asis0000644000176200001440000000030412540777273020157 0ustar liggesusers%\VignetteIndexEntry{A mapping between JSON data and R objects} %\VignetteEngine{R.rsp::asis} %\VignetteKeyword{PDF} %\VignetteKeyword{HTML} %\VignetteKeyword{vignette} %\VignetteKeyword{package} jsonlite/vignettes/json-mapping.Rnw.orig0000644000176200001440000015272212540777273020171 0ustar liggesusers%\VignetteEngine{knitr::knitr} %\VignetteIndexEntry{A mapping between JSON data and R objects} <>= #For JSS #opts_chunk$set(prompt=TRUE, highlight=FALSE, background="white") #options(prompt = "R> ", continue = "+ ", width = 70, useFancyQuotes = FALSE) @ %This is a template. 
%Actual text goes in sources/content.Rnw \documentclass{article} \author{Jeroen Ooms} %useful packages \usepackage{url} \usepackage{fullpage} \usepackage{xspace} \usepackage{booktabs} \usepackage{enumitem} \usepackage[hidelinks]{hyperref} \usepackage[round]{natbib} \usepackage{fancyvrb} \usepackage[toc,page]{appendix} \usepackage{breakurl} %for table positioning \usepackage{float} \restylefloat{table} %support for accents \usepackage[utf8]{inputenc} %support for ascii art \usepackage{pmboxdraw} %use vspace instead of indentation for paragraphs \usepackage{parskip} %extra line spacing \usepackage{setspace} \setstretch{1.25} %knitr style verbatim blocks \newenvironment{codeblock}{ \VerbatimEnvironment \definecolor{shadecolor}{rgb}{0.95, 0.95, 0.95}\color{fgcolor} \color{black} \begin{kframe} \begin{BVerbatim} }{ \end{BVerbatim} \end{kframe} } %placeholders for JSS/RJournal \newcommand{\pkg}[1]{\texttt{#1}} \newcommand{\code}[1]{\texttt{#1}} \newcommand{\proglang}[1]{\texttt{#1}} %shorthands \newcommand{\JSON}{\texttt{JSON}\xspace} \newcommand{\R}{\proglang{R}\xspace} \newcommand{\C}{\proglang{C}\xspace} \newcommand{\toJSON}{\code{toJSON}\xspace} \newcommand{\fromJSON}{\code{fromJSON}\xspace} \newcommand{\XML}{\pkg{XML}\xspace} \newcommand{\jsonlite}{\pkg{jsonlite}\xspace} \newcommand{\RJSONIO}{\pkg{RJSONIO}\xspace} \newcommand{\API}{\texttt{API}\xspace} \newcommand{\JavaScript}{\proglang{JavaScript}\xspace} %trick for using same content file as chatper and article \newcommand{\maintitle}[1]{ \title{#1} \maketitle } %actual document \begin{document} \maintitle{The \jsonlite Package: A Practical and Consistent Mapping Between \JSON Data and \R Objects} <>= library(jsonlite) library(knitr) opts_chunk$set(comment="") #this replaces tabs by spaces because latex-verbatim doesn't like tabs toJSON <- function(...){ gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); } @ \begin{abstract} A naive realization of \JSON data in \R maps \JSON \emph{arrays} to an unnamed list, and 
\JSON \emph{objects} to a named list. However, in practice a list is an awkward, inefficient type to store and manipulate data. Most statistical applications work with (homogeneous) vectors, matrices or data frames. Therefore \JSON packages in \R typically define certain special cases of \JSON structures which map to simpler \R types. Currently no formal guidelines or consensus exists on how \R data should be represented in \JSON. Furthermore, upon closer inspection, even the most basic data structures in \R actually do not perfectly map to their \JSON counterparts and leave some ambiguity for edge cases. These problems have resulted in different behavior between implementations and can lead to unexpected output for edge cases. This paper explicitly describes a mapping between \R classes and \JSON data, highlights potential problems, and outlines conventions that generalize the mapping to cover all common structures. We emphasize the importance of type consistency when using \JSON to exchange dynamic data, and illustrate using examples and anecdotes. The \jsonlite package is used throughout the paper as a reference implementation. \end{abstract} \section{Introduction} \emph{JavaScript Object Notation} (\JSON) is a text format for the serialization of structured data \citep{crockford2006application}. It is derived from the object literals of \proglang{JavaScript}, as defined in the \proglang{ECMAScript} programming language standard \citep{ecma1999262}. Design of \JSON is simple and concise in comparison with other text based formats, and it was originally proposed by Douglas Crockford as a ``fat-free alternative to \XML'' \citep{crockford2006json}. The syntax is easy for humans to read and write, easy for machines to parse and generate and completely described in a single page at \url{http://www.json.org}. 
The character encoding of \JSON text is always Unicode, using \texttt{UTF-8} by default \citep{crockford2006application}, making it naturally compatible with non-latin alphabets. Over the past years, \JSON has become hugely popular on the internet as a general purpose data interchange format. High quality parsing libraries are available for almost any programming language, making it easy to implement systems and applications that exchange data over the network using \JSON. For \R \citep{R}, several packages that assist the user in generating, parsing and validating \JSON are available through CRAN, including \pkg{rjson} \citep{rjson}, \pkg{RJSONIO} \citep{RJSONIO}, and \pkg{jsonlite} \citep{jsonlite}. The emphasis of this paper is not on discussing the \JSON format or any particular implementation for using \JSON with \R. We refer to \cite{nolan2014xml} for a comprehensive introduction, or one of the many tutorials available on the web. Instead we take a high level view and discuss how \R data structures are most naturally represented in \JSON. This is not a trivial problem, particularly for complex or relational data as they frequently appear in statistical applications. Several \R packages implement \toJSON and \fromJSON functions which directly convert \R objects into \JSON and vice versa. However, the exact mapping between the various \R data classes \JSON structures is not self evident. Currently, there are no formal guidelines, or even consensus between implementations on how \R data should be represented in \JSON. Furthermore, upon closer inspection, even the most basic data structures in \R actually do not perfectly map to their \JSON counterparts, and leave some ambiguity for edge cases. These problems have resulted in different behavior between implementations, and can lead to unexpected output for certain special cases. Furthermore, best practices of representing data in \JSON have been established outside the \R community. 
Incorporating these conventions where possible is important to maximize interoperability. %When relying on \JSON as the data interchange format, the mapping between \R objects and \JSON data must be consistent and unambiguous. Clients relying on \JSON to get data in and out of \R must know exactly what to expect in order to facilitate reliable communication, even if the data themselves are dynamic. Similarly, \R code using dynamic \JSON data from an external source is only reliable when the conversion from \JSON to \R is consistent. This document attempts to take away some of the ambiguity by explicitly describing the mapping between \R classes and \JSON data, highlighting problems and propose conventions that can generalize the mapping to cover all common classes and cases in R. \subsection{Parsing and type safety} The \JSON format specifies 4 primitive types (\texttt{string}, \texttt{number}, \texttt{boolean}, \texttt{null}) and two \emph{universal structures}: \begin{itemize} %[itemsep=3pt, topsep=5pt] \item A \JSON \emph{object}: an unordered collection of zero or more name-value pairs, where a name is a string and a value is a string, number, boolean, null, object, or array. \item A \JSON \emph{array}: an ordered sequence of zero or more values. \end{itemize} \noindent Both these structures are heterogeneous; i.e. they are allowed to contain elements of different types. Therefore, the native \R realization of these structures is a \texttt{named list} for \JSON objects, and \texttt{unnamed list} for \JSON arrays. However, in practice a list is an awkward, inefficient type to store and manipulate data in \R. Most statistical applications work with (homogeneous) vectors, matrices or data frames. In order to give these data structures a \JSON representation, we can define certain special cases of \JSON structures which get parsed into other, more specific \R types. 
For example, one convention which all current implementations have in common is that a homogeneous array of primitives gets parsed into an \texttt{atomic vector} instead of a \texttt{list}. The \pkg{RJSONIO} documentation uses the term ``simplify'' for this behavior, and we adopt this jargon. <<>>= txt <- '[12, 3, 7]' x <- fromJSON(txt) is(x) print(x) @ This seems very reasonable and it is the only practical solution to represent vectors in \JSON. However the price we pay is that automatic simplification can compromise type-safety in the context of dynamic data. For example, suppose an \R package uses \fromJSON to pull data from a \JSON \API on the web and that for some particular combination of parameters the result includes a \texttt{null} value, e.g: \texttt{[12, null, 7]}. This is actually quite common, many \API's use \texttt{null} for missing values or unset fields. This case makes the behavior of parser ambiguous, because the \JSON array is technically no longer homogeneous. And indeed, some implementations will now return a \texttt{list} instead of a \texttt{vector}. If the user had not anticipated this scenario and the script assumes a \texttt{vector}, the code is likely to run into type errors. The lesson here is that we need to be very specific and explicit about the mapping that is implemented to convert between \JSON data and \R objects. When relying on \JSON as a data interchange format, the behavior of the parser must be consistent and unambiguous. Clients relying on \JSON to get data in and out of \R must know exactly what to expect in order to facilitate reliable communication, even if the content of the data is dynamic. Similarly, \R code using dynamic \JSON data from an external source is only reliable when the conversion from \JSON to \R is consistent. Moreover a practical mapping must incorporate existing conventions and use the most natural representation of certain structures in \R. 
In the example above, we could argue that instead of falling back on a \texttt{list}, the array is more naturally interpreted as a numeric vector where the \texttt{null} becomes a missing value (\texttt{NA}). These principles will extrapolate as we start discussing more complex \JSON structures representing matrices and data frames. % \subsection{A Bidirectional Mapping} % % - bidirectional: one-to-one correspondence between JSON and \R classes with minimal coersing. % - relation is functional in each direction: json interface to \R objects, and \R objects can be used to manipulate a JSON structure. % - Results in unique coupling between json and objects that makes it natural to manipulate JSON in \R, and access \R objects from their JSON representation. % - Mild assumption of consistency. % - Supported classes: vectors of type numeric, character, logical, data frame and matrix. % - Natural class is implicit in the structure, rather than explicitly encode using metadata. % - Will show examples of why this is powerful. \subsection[Reference implementation: the jsonlite package]{Reference implementation: the \jsonlite package} The \jsonlite package provides a reference implementation of the conventions proposed in this document. It is a fork of the \RJSONIO package by Duncan Temple Lang, which builds on \texttt{libjson} \texttt{C++} library from Jonathan Wallace. \jsonlite uses the parser from \RJSONIO, but the \R code has been rewritten from scratch. Both packages implement \toJSON and \fromJSON functions, but their output is quite different. Finally, the \jsonlite package contains a large set of unit tests to validate that \R objects are correctly converted to \JSON and vice versa. These unit tests cover all classes and edge cases mentioned in this document, and could be used to validate if other implementations follow the same conventions. 
<>= library(testthat) test_package("jsonlite") @ Note that even though \JSON allows for inserting arbitrary white space and indentation, the unit tests assume that white space is trimmed. \subsection{Class-based versus type-based encoding} \label{serializejson} The \jsonlite package actually implements two systems for translating between \R objects and \JSON. This document focuses on the \toJSON and \fromJSON functions which use \R's class-based method dispatch. For all of the common classes in \R, the \jsonlite package implements \toJSON methods as described in this document. Users in \R can extend this system by implementing additional methods for other classes. This also means that classes that do not have the \toJSON method defined are not supported. Furthermore, the implementation of a specific \toJSON method determines which data and metadata in the objects of this class gets encoded in its \JSON representation, and how. In this respect, \toJSON is similar to e.g. the \texttt{print} function, which also provides a certain \emph{representation} of an object based on its class and optionally some print parameters. This representation does not necessarily reflect all information stored in the object, and there is no guaranteed one-to-one correspondence between \R objects and \JSON. I.e. calling \code{fromJSON(toJSON(object))} will return an object which only contains the data that was encoded by the \toJSON method for this particular class, and which might even have a different class than the original. The alternative to class-based method dispatch is to use type-based encoding, which \jsonlite implements in the functions \texttt{serializeJSON} and \code{unserializeJSON}. All data structures in \R get stored in memory using one of the internal \texttt{SEXP} storage types, and \code{serializeJSON} defines an encoding schema which captures the type, value, and attributes for each storage type. 
The resulting \JSON closely resembles the internal structure of the underlying \C data types, and can be perfectly restored to the original \R object using \code{unserializeJSON}. This system is relatively straightforward to implement, but the resulting \JSON is very verbose, hard to interpret, and cumbersome to generate in the context of another language or system. For most applications this is actually impractical because it requires the client/consumer to understand and manipulate \R data types, which is difficult and reduces interoperability. Instead we can make data in \R more accessible to third parties by defining sensible \JSON representations that are natural for the class of an object, rather than its internal storage type. This document does not discuss the \code{serializeJSON} system in any further detail, and solely treats the class based system implemented in \toJSON and \fromJSON. However the reader that is interested in full serialization of \R objects into \JSON is encouraged to have a look at the respective manual pages. \subsection{Scope and limitations} Before continuing, we want to stress some limitations of encoding \R data structures in \JSON. Most importantly, there are limitations to the types of objects that can be represented. In general, temporary in-memory properties such as connections, file descriptors and (recursive) memory references are always difficult if not impossible to store in a sensible way, regardless of the language or serialization method. This document focuses on the common \R classes that hold \emph{data}, such as vectors, factors, lists, matrices and data frames. We do not treat language level constructs such as expressions, functions, promises, which hold little meaning outside the context of \R. We also don't treat special compound classes such as linear models or custom classes defined in contributed packages. 
When designing systems or protocols that interact with \R, it is highly recommended to stick with the standard data structures for the interface input/output. Then there are limitations introduced by the format. Because \JSON is a human readable, text-based format, it does not support binary data, and numbers are stored in their decimal notation. The latter leads to loss of precision for real numbers, depending on how many digits the user decides to print. Several dialects of \JSON exist such as \texttt{BSON} \citep{chodorow2013mongodb} or \texttt{MSGPACK} \citep{msgpack}, which extend the format with various binary types. However, these formats are much less popular, less interoperable, and often impractical, precisely because they require binary parsing and abandon human readability. The simplicity of \JSON is what makes it an accessible and widely applicable data interchange format. In cases where it is really needed to include some binary data in \JSON, we can encode a blob as a string using \texttt{base64}. Finally, as mentioned earlier, \fromJSON is not a perfect inverse function of \toJSON, as is the case for \code{serializeJSON} and \code{unserializeJSON}. The class based mappings are designed for concise and practical encoding of the various common data structures. Our implementation of \toJSON and \fromJSON approximates a reversible mapping between \R objects and \JSON for the standard data classes, but there are always limitations and edge cases. For example, the \JSON representation of an empty vector, empty list or empty data frame is the same: \texttt{"[ ]"}. Also some special vector types such as factors, dates or timestamps get coerced to strings, as they would in for example \texttt{CSV}. This is a quite typical and expected behavior among text based formats, but it does require some additional interpretation on the consumer side. % \subsection{Goals: Consistent and Practical} % % It can be helpful to see the problem from both sides. 
The \R user needs to interface external \JSON data from within \R. This includes reading data from a public source/API, or posting a specific \JSON structure to an online service. From the perspective of the \R user, \JSON data should be realized in \R using classes which are most natural in \R for a particular structure. A proper mapping is one which allows the \R user to read any incoming data or generate a specific \JSON structure using the familiar methods and classes in \R. Ideally, the \R user would like to forget about the interchange format altogether, and think about the external data interface in terms of its corresponding \R structures rather than a \JSON schema. The other perspective is that of a third party client or language, which needs to interface data in \R using \JSON. This actor wants to access and manipulate \R objects via their \JSON representation. A good mapping is one that allows a 3rd party client to get data in and out of \R, without necessarily understanding the specifics of the underlying \R classes. Ideally, the external client could forget about the \R objects and classes altogether, and think about input and output of data in terms of the \JSON schema, or the corresponding realization in the language of the client. % % Both sides come together in the context of an RPC service such as OpenCPU. OpenCPU exposes a HTTP API to let 3rd party clients call \R functions over HTTP. The function arguments are posted using \JSON and OpenCPU automatically converts these into \R objects to construct the \R function call. The return value of the function is then converted to \JSON and sent back to the client. To the client, the service works as a \JSON API, but it is implemented as a standard \R function that uses standard data structures for its arguments and return value. For this to work, the conversion between \JSON data and \R objects must be consistent and unambiguous. 
In the design of our mapping we have pursued the following requirements: % % \begin{itemize} % \item{Recognize and comply with existing conventions of encoding common data structures in \JSON, in particular (relational) data sets.} % \item{Consistently use a particular schema for a class of objects, including edge cases.} % \item{Avoid R-specific peculiarities to minimize opportunities for misinterpretation.} % \item{Mapping should optimally be reversible, but at least coercible for the standard classes.} % \item{Robustness principle: be strict on output but tolerant on input.} % \end{itemize} \section[Converting between JSON data and R classes]{Converting between \JSON data and \R classes} This section lists examples of how the common \R classes are represented in \JSON. As explained before, the \toJSON function relies on method dispatch, which means that objects get encoded according to their \texttt{class} attribute. If an object has multiple \texttt{class} values, \R uses the first occurring class which has a \toJSON method. If none of the classes of an object has a \toJSON method, an error is raised. \subsection{Atomic vectors} The most basic data type in \R is the atomic vector. Atomic vectors hold an ordered, homogeneous set of values of type \texttt{logical} (booleans), \texttt{character} (strings), \texttt{raw} (bytes), \texttt{numeric} (doubles), \texttt{complex} (complex numbers with a real and imaginary part), or \texttt{integer}. Because \R is fully vectorized, there is no user level notion of a primitive: a scalar value is considered a vector of length 1. Atomic vectors map to \JSON arrays: <<>>= x <- c(1, 2, pi) toJSON(x) @ The \JSON array is the only appropriate structure to encode a vector, even though vectors in \R are homogeneous, whereas the \JSON array is actually heterogeneous, but \JSON does not make this distinction. 
\subsubsection{Missing values} A typical domain specific problem when working with statistical data is presented by missing values: a concept foreign to many other languages. Besides regular values, each vector type in \R except for \texttt{raw} can hold \texttt{NA} as a value. Vectors of type \texttt{double} and \texttt{complex} define three additional types of non finite values: \texttt{NaN}, \texttt{Inf} and \texttt{-Inf}. The \JSON format does not natively support any of these types; therefore such values need to be encoded in some other way. There are two obvious approaches. The first one is to use the \JSON \texttt{null} type. For example: <<>>= x <- c(TRUE, FALSE, NA) toJSON(x) @ The other option is to encode missing values as strings by wrapping them in double quotes: <<>>= x <- c(1,2,NA,NaN,Inf,10) toJSON(x) @ Both methods result in valid \JSON, but both have a limitation: the problem with the \texttt{null} type is that it is impossible to distinguish between different types of missing data, which could be a problem for numeric vectors. The values \texttt{Inf}, \texttt{-Inf}, \texttt{NA} and \texttt{NaN} carry different meanings, and these should not get lost in the encoding. The problem with encoding missing values as strings is that this method can not be used for character vectors, because the consumer won't be able to distinguish the actual string \texttt{"NA"} and the missing value \texttt{NA}. This would create a likely source of bugs, where clients mistakenly interpret \texttt{"NA"} as an actual string value, which is a common problem with text-based formats such as \texttt{CSV}. For this reason, \jsonlite uses the following defaults: \begin{itemize} \item Missing values in non-numeric vectors (\texttt{logical}, \texttt{character}) are encoded as \texttt{null}. \item Missing values in numeric vectors (\texttt{double}, \texttt{integer}, \texttt{complex}) are encoded as strings. 
\end{itemize} We expect that these conventions are most likely to result in the correct interpretation of missing values. Some examples: <<>>= toJSON(c(TRUE, NA, NA, FALSE)) toJSON(c("FOO", "BAR", NA, "NA")) toJSON(c(3.14, NA, NaN, 21, Inf, -Inf)) #Non-default behavior toJSON(c(3.14, NA, NaN, 21, Inf, -Inf), na="null") @ \subsubsection{Special vector types: dates, times, factor, complex} Besides missing values, \JSON also lacks native support for some of the basic vector types in \R that frequently appear in data sets. These include vectors of class \texttt{Date}, \texttt{POSIXt} (timestamps), \texttt{factors} and \texttt{complex} vectors. By default, the \jsonlite package coerces these types to strings (using \texttt{as.character}): <<>>= toJSON(Sys.time() + 1:3) toJSON(as.Date(Sys.time()) + 1:3) toJSON(factor(c("foo", "bar", "foo"))) toJSON(complex(real=runif(3), imaginary=rnorm(3))) @ When parsing such \JSON strings, these values will appear as character vectors. In order to obtain the original types, the user needs to manually coerce them back to the desired type using the corresponding \texttt{as} function, e.g. \code{as.POSIXct}, \code{as.Date}, \code{as.factor} or \code{as.complex}. In this respect, \JSON is subject to the same limitations as text based formats such as \texttt{CSV}. \subsubsection{Special cases: vectors of length 0 or 1} Two edge cases deserve special attention: vectors of length 0 and vectors of length 1. In \jsonlite these are encoded respectively as an empty array, and an array of length 1: <<>>= #vectors of length 0 and 1 toJSON(vector()) toJSON(pi) #vectors of length 0 and 1 in a named list toJSON(list(foo=vector())) toJSON(list(foo=pi)) #vectors of length 0 and 1 in an unnamed list toJSON(list(vector())) toJSON(list(pi)) @ This might seem obvious but these cases result in very different behavior between different \JSON packages. 
This is probably caused by the fact that \R does not have a scalar type, and some package authors decided to treat vectors of length 1 as if they were a scalar. For example, in the current implementations, both \RJSONIO and \pkg{rjson} encode a vector of length one as a \JSON primitive when it appears within a list: <<>>= # Other packages make different choices: cat(rjson::toJSON(list(n = c(1)))) cat(rjson::toJSON(list(n = c(1, 2)))) @ When encoding a single dataset this seems harmless, but in the context of dynamic data this inconsistency is almost guaranteed to cause bugs. For example, imagine an \R web service which lets the user fit a linear model and sends back the fitted parameter estimates as a \JSON array. The client code then parses the \JSON, and iterates over the array of coefficients to display them in a \texttt{GUI}. All goes well, until the user decides to fit a model with only one predictor. If the \JSON encoder suddenly returns a primitive value where the client is expecting an array, the application will likely break. Therefore, any consumer or client would need to be aware of the special case where the vector becomes a primitive, and explicitly take this exception into account when processing the result. When the client fails to do so and proceeds as usual, it will probably call an iterator or loop method on a primitive value, resulting in the obvious errors. To avoid this, \jsonlite uses consistent encoding schemes which do not depend on variable object properties such as its length. Hence, a vector is always encoded as an array, even when it is of length 0 or 1. \subsection{Matrices} Arguably one of the strongest sides of \R is its ability to interface libraries for basic linear algebra subprograms \citep{lawson1979basic} such as \texttt{LAPACK} \citep{anderson1999lapack}. 
These libraries provide well tuned, high performance implementations of important linear algebra operations to calculate anything from inner products and eigen values to singular value decompositions, which are in turn building blocks of statistical methods such as linear regression or principal component analysis. Linear algebra methods operate on \emph{matrices}, making the matrix one of the most central data classes in \R. Conceptually, a matrix consists of a 2 dimensional structure of homogeneous values. It is indexed using 2 numbers (or vectors), representing the rows and columns of the matrix respectively. <<>>= x <- matrix(1:12, nrow=3, ncol=4) print(x) print(x[2,4]) @ A matrix is stored in memory as a single atomic vector with an attribute called \texttt{"dim"} defining the dimensions of the matrix. The product of the dimensions is equal to the length of the vector. <<>>= attributes(volcano) length(volcano) @ Even though the matrix is stored as a single vector, the way it is printed and indexed makes it conceptually a 2 dimensional structure. In \jsonlite a matrix maps to an array of equal-length subarrays: <<>>= x <- matrix(1:12, nrow=3, ncol=4) toJSON(x) @ We expect this representation will be the most intuitive to interpret, also within languages that do not have a native notion of a matrix. Note that even though \R stores matrices in \emph{column major} order, \jsonlite encodes matrices in \emph{row major} order. This is a more conventional and intuitive way to represent matrices and is consistent with the row-based encoding of data frames discussed in the next section. 
When the \JSON string is properly indented (recall that white space and line breaks are optional in \JSON), it looks very similar to the way \R prints matrices: \begin{verbatim} [ [ 1, 4, 7, 10 ], [ 2, 5, 8, 11 ], [ 3, 6, 9, 12 ] ] \end{verbatim} Because the matrix is implemented in \R as an atomic vector, it automatically inherits the conventions mentioned earlier with respect to edge cases and missing values: <<>>= x <- matrix(c(1,2,4,NA), nrow=2) toJSON(x) toJSON(x, na="null") toJSON(matrix(pi)) @ \subsubsection{Matrix row and column names} Besides the \texttt{"dim"} attribute, the matrix class has an additional, optional attribute: \texttt{"dimnames"}. This attribute holds names for the rows and columns in the matrix. However, we decided not to include this information in the default \JSON mapping for matrices for several reasons. First of all, because this attribute is optional, either row or column names or both could be \texttt{NULL}. This makes it difficult to define a practical mapping that covers all cases with and without row and/or column names. Secondly, the names in matrices are mostly there for annotation only; they are not actually used in calculations. The linear algebra subroutines mentioned before completely ignore them, and never include any names in their output. So there is often little purpose of setting names in the first place, other than annotation. When row or column names of a matrix seem to contain vital information, we might want to transform the data into a more appropriate structure. \cite{tidydata} calls this \emph{``tidying''} the data and outlines best practices on storing statistical data in its most appropriate form. He lists the issue where \emph{``column headers are values, not variable names''} as the most common source of untidy data. This often happens when the structure is optimized for presentation (e.g. printing), rather than computation. 
In the following example taken from Wickham, the predictor variable (treatment) is stored in the column headers rather than the actual data. As a result, these values do not get included in the \JSON output: <<>>= x <- matrix(c(NA,1,2,5,NA,3), nrow=3) row.names(x) <- c("Joe", "Jane", "Mary"); colnames(x) <- c("Treatment A", "Treatment B") print(x) toJSON(x) @ Wickham recommends that the data be \emph{melted} into its \emph{tidy} form. Once the data is tidy, the \JSON encoding will naturally contain the treatment values: <<>>= library(reshape2) y <- melt(x, varnames=c("Subject", "Treatment")) print(y) toJSON(y, pretty=TRUE) @ In some other cases, the column headers actually do contain variable names, and melting is inappropriate. For data sets with records consisting of a set of named columns (fields), \R has a more natural and flexible class: the data-frame. The \toJSON method for data frames (described later) is more suitable when we want to refer to rows or fields by their name. Any matrix can easily be converted to a data-frame using the \code{as.data.frame} function: <<>>= toJSON(as.data.frame(x), pretty=TRUE) @ For some cases this results in the desired output, but in this example melting seems more appropriate. \subsection{Lists} The \texttt{list} is the most general purpose data structure in \R. It holds an ordered set of elements, including other lists, each of arbitrary type and size. Two types of lists are distinguished: named lists and unnamed lists. A list is considered a named list if it has an attribute called \texttt{"names"}. 
In practice, a named list is any list for which we can access an element by its name, whereas elements of an unnamed lists can only be accessed using their index number: <<>>= mylist1 <- list("foo" = 123, "bar"= 456) print(mylist1$bar) mylist2 <- list(123, 456) print(mylist2[[2]]) @ \subsubsection{Unnamed lists} Just like vectors, an unnamed list maps to a \JSON array: <<>>= toJSON(list(c(1,2), "test", TRUE, list(c(1,2)))) @ Note that even though both vectors and lists are encoded using \JSON arrays, they can be distinguished from their contents: an \R vector results in a \JSON array containing only primitives, whereas a list results in a \JSON array containing only objects and arrays. This allows the \JSON parser to reconstruct the original type from encoded vectors and arrays: <<>>= x <- list(c(1,2,NA), "test", FALSE, list(foo="bar")) identical(fromJSON(toJSON(x)), x) @ The only exception is the empty list and empty vector, which are both encoded as \texttt{[ ]} and therefore indistinguishable, but this is rarely a problem in practice. \subsubsection{Named lists} A named list in \R maps to a \JSON \emph{object}: <<>>= toJSON(list(foo=c(1,2), bar="test")) @ Because a list can contain other lists, this works recursively: <>= toJSON(list(foo=list(bar=list(baz=pi)))) @ Named lists map almost perfectly to \JSON objects with one exception: list elements can have empty names: <<>>= x <- list(foo=123, "test", TRUE) attr(x, "names") x$foo x[[2]] @ In a \JSON object, each element in an object must have a valid name. To ensure this property, \jsonlite uses the same solution as the \code{print} method, which is to fall back on indices for elements that do not have a proper name: <<>>= x <- list(foo=123, "test", TRUE) print(x) toJSON(x) @ This behavior ensures that all generated \JSON is valid, however named lists with empty names should be avoided where possible. 
When actually designing \R objects that should be interoperable, it is recommended that each list element is given a proper name. \subsection{Data frame} The \texttt{data frame} is perhaps the most central data structure in \R from the user point of view. This class holds tabular data in which each column is named and (usually) homogeneous. Conceptually it is very similar to a table in relational data bases such as \texttt{MySQL}, where \emph{fields} are referred to as \emph{column names}, and \emph{records} are called \emph{rows}. Like a matrix, a data frame can be subsetted with two indices, to extract certain rows and columns of the data: <<>>= is(iris) names(iris) print(iris[1:3, c(1,5)]) print(iris[1:3, c("Sepal.Width", "Species")]) @ For the previously discussed classes such as vectors and matrices, behavior of \jsonlite was quite similar to the other available packages that implement \toJSON and \fromJSON functions, with only minor differences for missing values and edge cases. But when it comes to data frames, \jsonlite takes a completely different approach. The behavior of \jsonlite is designed for compatibility with conventional ways of encoding table-like structures outside the \R community. The implementation is more involved, but results in a powerful and more natural way of representing data frames in \JSON. \subsubsection{Column based versus row based tables} Generally speaking, tabular data structures can be implemented in two different ways: in a column based, or row based fashion. A column based structure consists of a named collection of equal-length, homogeneous arrays representing the table columns. In a row-based structure on the other hand, the table is implemented as a set of heterogeneous associative arrays representing table rows with field values for each particular record. 
Even though most languages provide flexible and abstracted interfaces that hide these implementation details from the user, they can have huge implications for performance. A column based structure is efficient for inserting or extracting certain columns of the data, but it is inefficient for manipulating individual rows. For example to insert a single row somewhere in the middle, each of the columns has to be sliced and stitched back together. For row-based implementations, it is the exact other way around: we can easily manipulate a particular record, but to insert/extract a whole column we would need to iterate over all records in the table and read/modify the appropriate field in each of them. The data frame class in \R is implemented in a column based fashion: it consists of a \texttt{named list} of equal-length vectors. Thereby the columns in the data frame naturally inherit the properties from atomic vectors discussed before, such as homogeneity, missing values, etc. Another argument for column-based implementation is that statistical methods generally operate on columns. For example, the \code{lm} function fits a \emph{linear regression} by extracting the columns from a data frame as specified by the \texttt{formula} argument. \R simply binds the specified columns together into a matrix $X$ and calls out to a highly optimized \proglang{FORTRAN} subroutine to calculate the OLS estimates $\hat{\beta} = (X^TX)^{-1}X^Ty$ using the $QR$ factorization of $X$. Many other statistical modeling functions follow similar steps, and are computationally efficient because of the column-based data storage in \R. Unfortunately \R is an exception in its preference for column-based storage: most languages, systems, databases, \API's, etc, are optimized for record based operations. For this reason, the conventional way to store and communicate tabular data in \JSON seems to be almost exclusively row based. 
This discrepancy presents various complications when converting between data frames and \JSON. The remainder of this section discusses details and challenges of consistently mapping record based \JSON data as frequently encountered on the web, into column-based data frames which are convenient for statistical computing. \subsubsection{Row based data frame encoding} The encoding of data frames is one of the major differences between \jsonlite and implementations from other currently available packages. Instead of using the column-based encoding also used for lists, \jsonlite maps data frames by default to an array of records: <<>>= toJSON(iris[1:2,], pretty=TRUE) @ This output looks a bit like a list of named lists. However, there is one major difference: the individual records contain \JSON primitives, whereas lists always contain \JSON objects or arrays: <<>>= toJSON(list(list(Species="Foo", Width=21)), pretty=TRUE) @ This leads to the following convention: when encoding \R objects, \JSON primitives only appear in vectors and data-frame rows. Primitives within a \JSON array indicate a vector, and primitives appearing inside a \JSON object indicate a data-frame row. A \JSON encoded \texttt{list} (named or unnamed) will never contain \JSON primitives. This is a subtle but important convention that helps to distinguish \R classes from their \JSON representation, without explicitly encoding any metadata. \subsubsection{Missing values in data frames} The section on atomic vectors discussed two methods of encoding missing data appearing in a vector: either using strings or using the \JSON \texttt{null} type. When a missing value appears in a data frame, there is a third option: simply not include this field in the \JSON record: <<>>= x <- data.frame(foo=c(FALSE, TRUE,NA,NA), bar=c("Aladdin", NA, NA, "Mario")) print(x) toJSON(x, pretty=TRUE) @ The default behavior of \jsonlite is to omit missing data from records in a data frame. 
This seems to be the most conventional method used on the web, and we expect this encoding will most likely lead to the correct interpretation of \emph{missingness}, even in languages without an explicit notion of \texttt{NA}. \subsubsection{Relational data: nested records} Nested datasets are somewhat unusual in \R, but frequently encountered in \JSON. Such structures do not really fit the vector based paradigm which makes them harder to manipulate in \R. However, nested structures are too common in \JSON to ignore, and with a little work most cases still map to a data frame quite nicely. The most common scenario is a dataset in which a certain field within each record contains a \emph{subrecord} with additional fields. The \jsonlite implementation maps these subrecords to a nested data frame. Whereas the data frame class usually consists of vectors, technically a column can also be list or another data frame with matching dimension (this stretches the meaning of the word ``column'' a bit): <>= options(stringsAsFactors=FALSE) x <- data.frame(driver = c("Bowser", "Peach"), occupation = c("Koopa", "Princess")) x$vehicle <- data.frame(model = c("Piranha Prowler", "Royal Racer")) x$vehicle$stats <- data.frame(speed = c(55, 34), weight = c(67, 24), drift = c(35, 32)) str(x) toJSON(x, pretty=TRUE) myjson <- toJSON(x) y <- fromJSON(myjson) identical(x,y) @ When encountering \JSON data containing nested records on the web, chances are that these data were generated from \emph{relational} database. The \JSON field containing a subrecord represents a \emph{foreign key} pointing to a record in an external table. For the purpose of encoding these into a single \JSON structure, the tables were joined into a nested structure. The directly nested subrecord represents a \emph{one-to-one} or \emph{many-to-one} relation between the parent and child table, and is most naturally stored in \R using a nested data frame. 
In the example above, the \texttt{vehicle} field points to a table of vehicles, which in turn contains a \texttt{stats} field pointing to a table of stats. When there is no more than one subrecord for each record, we easily \emph{flatten} the structure into a single non-nested data frame. <<>>= y <- fromJSON(myjson, flatten=TRUE) str(y) @ \subsubsection{Relational data: nested tables} The one-to-one relation discussed above is relatively easy to store in \R, because each record contains at most one subrecord. Therefore we can use either a nested data frame, or flatten the data frame. However, things get more difficult when \JSON records contain a field with a nested array. Such a structure appears in relational data in case of a \emph{one-to-many} relation. A standard textbook illustration is the relation between authors and titles. For example, a field can contain an array of values: <>= x <- data.frame(author = c("Homer", "Virgil", "Jeroen")) x$poems <- list(c("Iliad", "Odyssey"), c("Eclogues", "Georgics", "Aeneid"), vector()); names(x) toJSON(x, pretty = TRUE) @ As can be seen from the example, the way to store this in a data frame is using a list of character vectors. This works, and although unconventional, we can still create and read such structures in \R relatively easily. However, in practice the one-to-many relation is often more complex. It results in fields containing a \emph{set of records}. In \R, the only way to model this is as a column containing a list of data frames, one separate data frame for each row: <>= x <- data.frame(author = c("Homer", "Virgil", "Jeroen")) x$poems <- list( data.frame(title=c("Iliad", "Odyssey"), year=c(-1194, -800)), data.frame(title=c("Eclogues", "Georgics", "Aeneid"), year=c(-44, -29, -19)), data.frame() ) toJSON(x, pretty=TRUE) @ Because \R doesn't have native support for relational data, there is no natural class to store such structures. The best we can do is a column containing a list of sub-dataframes. 
This does the job, and allows the \R user to access or generate nested \JSON structures. However, a data frame like this cannot be flattened, and the class does not guarantee that each of the individual nested data frames contain the same fields, as would be the case in an actual relational data base. \section{Structural consistency and type safety in dynamic data} Systems that automatically exchange information over some interface, protocol or \API require well defined and unambiguous meaning and arrangement of data. In order to process and interpret input and output, contents must obey a steady structure. Such structures are usually described either informally in documentation or more formally in a schema language. The previous section emphasized the importance of consistency in the mapping between \JSON data and \R classes. This section takes a higher level view and explains the importance of structure consistency for dynamic data. This topic can be a bit subtle because it refers to consistency among different instantiations of a \JSON structure, rather than a single case. We try to clarify by breaking down the concept into two important parts, and illustrate with analogies and examples from \R. \subsection{Classes, types and data} Most object-oriented languages are designed with the idea that all objects of a certain class implement the same fields and methods. In strong-typed languages such as \proglang{S4} or \proglang{Java}, names and types of the fields are formally declared in a class definition. In other languages such as \proglang{S3} or \proglang{JavaScript}, the fields are not enforced by the language but rather at the discretion of the programmer. One way or another they assume that members of a certain class agree on field names and types, so that the same methods can be applied to any object of a particular class. This basic principle holds for dynamic data exactly the same way as for objects. 
Software that processes dynamic data can only work reliably if the various elements of the data have consistent names and structure. Consensus must exist between the different parties on data that is exchanged as part of an interface or protocol. This requires the structure to follow some sort of template that specifies which attributes can appear in the data, what they mean and how they are composed. Thereby each possible scenario can be accounted for in the software so that data can be interpreted and processed appropriately with no exceptions during run-time. Some data interchange formats such as \texttt{XML} or \texttt{Protocol Buffers} take a formal approach to this matter, and have well established \emph{schema languages} and \emph{interface description languages}. Using such a meta language it is possible to define the exact structure, properties and actions of data interchange in a formal arrangement. However, in \JSON, such formal definitions are relatively uncommon. Some initiatives for \JSON schema languages exist \citep{jsonschema}, but they are not very well established and rarely seen in practice. One reason for this might be that defining and implementing formal schemas is complicated and a lot of work which defeats the purpose of using a lightweight format such as \JSON in the first place. But another reason is that it is often simply not necessary to be overly formal. The \JSON format is simple and intuitive, and under some general conventions, a well chosen example can suffice to characterize the structure. This section describes two important rules that are required to ensure that data exchange using \JSON is type safe. \subsection{Rule 1: Fixed keys} When using \JSON without a schema, there are no restrictions on the keys (field names) that can appear in a particular object. However, a source of data that returns a different set of keys every time it is called makes it very difficult to write software to process these data. 
Hence, the first rule is to limit \JSON interfaces to a finite set of keys that are known \emph{a priori} by all parties. It can be helpful to think about this in analogy with for example a relational database. Here, the database model separates the data from metadata. At run time, records can be inserted or deleted, and a certain query might return different content each time it is executed. But for a given query, each execution will return exactly the same \emph{field names}; hence as long as the table definitions are unchanged, the \emph{structure} of the output is consistent. Client software needs this structure to validate input, optimize implementation, and process each part of the data appropriately. In \JSON, data and metadata are not formally separated as in a database, but similar principles that hold for fields in a database, apply to keys in dynamic \JSON data.

A beautiful example of this in practice was given by Mike Dewar at the New York Open Statistical Programming Meetup on Jan. 12, 2012 \citep{jsonkeys}. In his talk he emphasizes using \JSON keys only for \emph{names}, and not for \emph{data}. He refers to this principle as the ``golden rule'', and explains how he learned his lesson the hard way. In one of his early applications, timeseries data was encoded by using the epoch timestamp as the \JSON key. Therefore the keys are different each time the query is executed:

\begin{verbatim}
[
  { "1325344443" : 124 },
  { "1325344456" : 131 },
  { "1325344478" : 137 }
]
\end{verbatim}

Even though this is valid \JSON, dynamic keys as in the example above are likely to introduce trouble. Most software will have great difficulty processing these values if we cannot specify the keys in the code. Moreover when documenting the API, either informally or formally using a schema language, we need to describe for each property in the data what the value means and is composed of.
Thereby a client or consumer can implement code that interprets and processes each element in the data in an appropriate manner. Both the documentation and interpretation of \JSON data rely on fixed keys with well defined meaning. Also note that the structure is difficult to extend in the future. If we want to add an additional property to each observation, the entire structure needs to change.

In his talk, Dewar explains that life gets much easier when we switch to the following encoding:

\begin{verbatim}
[
  { "time": "1325344443", "price": 124 },
  { "time": "1325344456", "price": 131 },
  { "time": "1325344478", "price": 137 }
]
\end{verbatim}

This structure will play much nicer with existing software that assumes fixed keys. Moreover, the structure can easily be described in documentation, or captured in a schema. Even when we have no intention of writing documentation or a schema for a dynamic \JSON source, it is still wise to design the structure in such a way that it \emph{could} be described by a schema. When the keys are fixed, a well chosen example can provide all the information required for the consumer to implement client code. Also note that the new structure is extensible: additional properties can be added to each observation without breaking backward compatibility.

In the context of \R, consistency of keys is closely related to Wickham's concept of \emph{tidy data} discussed earlier. Wickham states that the most common reason for messy data is column headers containing values instead of variable names. Column headers in tabular datasets become keys when converted to \JSON. Therefore, when headers are actually values, \JSON keys contain in fact data and can become unpredictable. The cure to inconsistent keys is almost always to tidy the data according to recommendations given by \cite{tidydata}.

\subsection{Rule 2: Consistent types}

In a strongly typed language, fields declare their class before any values are assigned.
Thereby the type of a given field is identical in all objects of a particular class, and arrays only contain objects of a single type. The \proglang{S3} system in \R is weakly typed and puts no formal restrictions on the class of certain properties, or the types of objects that can be combined into a collection. For example, the list below contains a character vector, a numeric vector and a list:

<<>>=
#Heterogeneous lists are bad!
x <- list("FOO", 1:3, list("bar"=pi))
toJSON(x)
@

However even though it is possible to generate such \JSON, it is bad practice. Fields or collections with ambiguous object types are difficult to describe, interpret and process in the context of inter-system communication. When using \JSON to exchange dynamic data, it is important that each property and array is \emph{type consistent}. In dynamically typed languages, the programmer needs to make sure that properties are of the correct type before encoding into \JSON. For \R, this means that the \texttt{unnamed lists} type is best avoided when designing interoperable structures because this type is not homogeneous.

Note that consistency is somewhat subjective as it refers to the \emph{meaning} of the elements; they do not necessarily have precisely the same structure. What is important is to keep in mind that the consumer of the data can interpret and process each element identically, e.g. iterate over the elements in the collection and apply the same method to each of them. To illustrate this, let's take the example of the data frame:

<<>>=
#conceptually homogeneous array
x <- data.frame(name=c("Jay", "Mary", NA, NA), gender=c("M", NA, NA, "F"))
toJSON(x, pretty=TRUE)
@

The \JSON array above has 4 elements, each of which is a \JSON object. However, due to the \texttt{NA} values, some records have more fields than others. But as long as they are conceptually the same type (e.g.
a person), the consumer can iterate over the elements to process each person in the set according to a predefined action. For example each element could be used to construct a \texttt{Person} object. A collection of different object classes should be separated and organized using a named list: <>= x <- list( humans = data.frame(name = c("Jay", "Mary"), married = c(TRUE, FALSE)), horses = data.frame(name = c("Star", "Dakota"), price = c(5000, 30000)) ) toJSON(x, pretty=TRUE) @ This might seem obvious, but dynamic languages such as \R can make it dangerously tempting to generate data containing mixed-type collections. Such inconsistent typing makes it very difficult to consume the data and creates a likely source of nasty bugs. Using consistent field names/types and homogeneous \JSON arrays is a strong convention among public \JSON \API's, for good reasons. We recommend \R users to respect these conventions when generating \JSON data in \R. %references \bibliographystyle{plainnat} \bibliography{references} %end \end{document} jsonlite/vignettes/json-paging.Rmd0000644000176200001440000002225512602263527017004 0ustar liggesusers--- title: "Combining pages of JSON data with jsonlite" date: "2015-09-06" output: html_document vignette: > %\VignetteIndexEntry{Combining pages of JSON data with jsonlite} %\VignetteEngine{knitr::rmarkdown} \usepackage[utf8]{inputenc} --- The [jsonlite](https://cran.r-project.org/package=jsonlite) package is a `JSON` parser/generator for R which is optimized for pipelines and web APIs. It is used by the OpenCPU system and many other packages to get data in and out of R using the `JSON` format. ## A bidirectional mapping One of the main strengths of `jsonlite` is that it implements a bidirectional [mapping](http://arxiv.org/abs/1403.2805) between JSON and data frames. Thereby it can convert nested collections of JSON records, as they often appear on the web, immediately into the appropriate R structure. 
For example to grab some data from ProPublica we can simply use: ```r library(jsonlite) mydata <- fromJSON("https://projects.propublica.org/forensics/geos.json", flatten = TRUE) View(mydata) ``` The `mydata` object is a data frame which can be used directly for modeling or visualization, without the need for any further complicated data manipulation. ## Paging with jsonlite A question that comes up frequently is how to combine pages of data. Most web APIs limit the amount of data that can be retrieved per request. If the client needs more data than what can fits in a single request, it needs to break down the data into multiple requests that each retrieve a fragment (page) of data, not unlike pages in a book. In practice this is often implemented using a `page` parameter in the API. Below an example from the [ProPublica Nonprofit Explorer API](http://projects.propublica.org/nonprofits/api) where we retrieve the first 3 pages of tax-exempt organizations in the USA, ordered by revenue: ```r baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" mydata0 <- fromJSON(paste0(baseurl, "&page=0"), flatten = TRUE) mydata1 <- fromJSON(paste0(baseurl, "&page=1"), flatten = TRUE) mydata2 <- fromJSON(paste0(baseurl, "&page=2"), flatten = TRUE) #The actual data is in the filings element mydata0$filings[1:10, c("organization.sub_name", "organization.city", "totrevenue")] ``` ``` organization.sub_name organization.city 1 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 2 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 3 KAISER FOUNDATION HEALTH PLAN INC OAKLAND 4 DAVIDSON COUNTY COMMUNITY COLLEGE FOUNDATION INC LEXINGTON 5 KAISER FOUNDATION HOSPITALS OAKLAND 6 KAISER FOUNDATION HOSPITALS OAKLAND 7 KAISER FOUNDATION HOSPITALS OAKLAND 8 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 9 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN 10 PARTNERS HEALTHCARE SYSTEM INC CHARLESTOWN totrevenue 1 42346486950 2 40148558254 3 37786011714 4 30821445312 5 
20013171194 6 18543043972 7 17980030355 8 10619215354 9 10452560305 10 9636630380 ``` To analyze or visualize these data, we need to combine the pages into a single dataset. We can do this with the `rbind.pages` function. Note that in this example, the actual data is contained by the `filings` field: ```r #Rows per data frame nrow(mydata0$filings) ``` ``` [1] 25 ``` ```r #Combine data frames filings <- rbind.pages( list(mydata0$filings, mydata1$filings, mydata2$filings) ) #Total number of rows nrow(filings) ``` ``` [1] 75 ``` ## Automatically combining many pages We can write a simple loop that automatically downloads and combines many pages. For example to retrieve the first 20 pages with non-profits from the example above: ```r #store all pages in a list first baseurl <- "https://projects.propublica.org/nonprofits/api/v1/search.json?order=revenue&sort_order=desc" pages <- list() for(i in 0:20){ mydata <- fromJSON(paste0(baseurl, "&page=", i)) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } #combine all into one filings <- rbind.pages(pages) #check output nrow(filings) ``` ``` [1] 525 ``` ```r colnames(filings) ``` ``` [1] "tax_prd" "tax_prd_yr" [3] "formtype" "pdf_url" [5] "updated" "totrevenue" [7] "totfuncexpns" "totassetsend" [9] "totliabend" "pct_compnsatncurrofcr" [11] "tax_pd" "subseccd" [13] "unrelbusinccd" "initiationfees" [15] "grsrcptspublicuse" "grsincmembers" [17] "grsincother" "totcntrbgfts" [19] "totprgmrevnue" "invstmntinc" [21] "txexmptbndsproceeds" "royaltsinc" [23] "grsrntsreal" "grsrntsprsnl" [25] "rntlexpnsreal" "rntlexpnsprsnl" [27] "rntlincreal" "rntlincprsnl" [29] "netrntlinc" "grsalesecur" [31] "grsalesothr" "cstbasisecur" [33] "cstbasisothr" "gnlsecur" [35] "gnlsothr" "netgnls" [37] "grsincfndrsng" "lessdirfndrsng" [39] "netincfndrsng" "grsincgaming" [41] "lessdirgaming" "netincgaming" [43] "grsalesinvent" "lesscstofgoods" [45] "netincsales" "miscrevtot11e" [47] "compnsatncurrofcr" "othrsalwages" [49] "payrolltx" 
"profndraising" [51] "txexmptbndsend" "secrdmrtgsend" [53] "unsecurednotesend" "retainedearnend" [55] "totnetassetend" "nonpfrea" [57] "gftgrntsrcvd170" "txrevnuelevied170" [59] "srvcsval170" "grsinc170" [61] "grsrcptsrelated170" "totgftgrntrcvd509" [63] "grsrcptsadmissn509" "txrevnuelevied509" [65] "srvcsval509" "subtotsuppinc509" [67] "totsupp509" "ein" [69] "organization" "eostatus" [71] "tax_yr" "operatingcd" [73] "assetcdgen" "transinccd" [75] "subcd" "grscontrgifts" [77] "intrstrvnue" "dividndsamt" [79] "totexcapgn" "totexcapls" [81] "grsprofitbus" "otherincamt" [83] "compofficers" "contrpdpbks" [85] "totrcptperbks" "totexpnspbks" [87] "excessrcpts" "totexpnsexempt" [89] "netinvstinc" "totaxpyr" [91] "adjnetinc" "invstgovtoblig" [93] "invstcorpstk" "invstcorpbnd" [95] "totinvstsec" "fairmrktvalamt" [97] "undistribincyr" "cmpmininvstret" [99] "sec4940notxcd" "sec4940redtxcd" [101] "infleg" "contractncd" [103] "claimstatcd" "propexchcd" [105] "brwlndmnycd" "furngoodscd" [107] "paidcmpncd" "trnsothasstscd" [109] "agremkpaycd" "undistrinccd" [111] "dirindirintcd" "invstjexmptcd" [113] "propgndacd" "excesshldcd" [115] "grntindivcd" "nchrtygrntcd" [117] "nreligiouscd" "grsrents" [119] "costsold" "totrcptnetinc" [121] "trcptadjnetinc" "topradmnexpnsa" [123] "topradmnexpnsb" "topradmnexpnsd" [125] "totexpnsnetinc" "totexpnsadjnet" [127] "othrcashamt" "mrtgloans" [129] "othrinvstend" "fairmrktvaleoy" [131] "mrtgnotespay" "tfundnworth" [133] "invstexcisetx" "sect511tx" [135] "subtitleatx" "esttaxcr" [137] "txwithldsrc" "txpaidf2758" [139] "erronbkupwthld" "estpnlty" [141] "balduopt" "crelamt" [143] "tfairmrktunuse" "distribamt" [145] "adjnetinccola" "adjnetinccolb" [147] "adjnetinccolc" "adjnetinccold" [149] "adjnetinctot" "qlfydistriba" [151] "qlfydistribb" "qlfydistribc" [153] "qlfydistribd" "qlfydistribtot" [155] "valassetscola" "valassetscolb" [157] "valassetscolc" "valassetscold" [159] "valassetstot" "qlfyasseta" [161] "qlfyassetb" "qlfyassetc" [163] "qlfyassetd" 
"qlfyassettot" [165] "endwmntscola" "endwmntscolb" [167] "endwmntscolc" "endwmntscold" [169] "endwmntstot" "totsuprtcola" [171] "totsuprtcolb" "totsuprtcolc" [173] "totsuprtcold" "totsuprttot" [175] "pubsuprtcola" "pubsuprtcolb" [177] "pubsuprtcolc" "pubsuprtcold" [179] "pubsuprttot" "grsinvstinca" [181] "grsinvstincb" "grsinvstincc" [183] "grsinvstincd" "grsinvstinctot" ``` From here, we can go straight to analyzing the filings data without any further tedious data manipulation. jsonlite/vignettes/precompile.R0000644000176200001440000000037212573051442016400 0ustar liggesusers#Vignettes that depend on internet access have been precompiled: library(knitr) knit("vignettes/json-apis.Rmd.orig", "vignettes/json-apis.Rmd") knit("vignettes/json-paging.Rmd.orig", "vignettes/json-paging.Rmd") library(devtools) build_vignettes() jsonlite/MD50000644000176200001440000002230612626264003012415 0ustar liggesusers826885e9a9cd67f4c315131c481cf63f *DESCRIPTION 2b11af982e62e96e928587e70fe113fa *LICENSE 5a58c9e49d90f11642db23d72b8ce327 *NAMESPACE e442b90763067e25312b5689f3db8bd6 *NEWS 6392afdce07064ef77d4d49d02afb914 *R/apply_by_pages.R 14bcfc61e1a010f05f4ce98f3d714e64 *R/as.scalar.R f5128c6b69f745efe3478372c1edf744 *R/asJSON.AAAgeneric.R 8f9c49db93b95acea9964a6aeafe3f14 *R/asJSON.ANY.R 3c0c9d555f572fafe375a57b8ac3bccf *R/asJSON.AsIs.R d3cb1622378eee1386424dc9a6f869b8 *R/asJSON.Date.R 1c81ac6905684d593a92df6b2e4b999c *R/asJSON.NULL.R a268164dd6f2873d0286260f336c99e2 *R/asJSON.POSIXt.R 5adf2b5f2203c08e2ef0f38a25a1ad6e *R/asJSON.array.R 2ffd93fe6f8f8c0bd0600cda27a9e99b *R/asJSON.character.R b51612de3148fa36cacc82d1b856c7ad *R/asJSON.classRepresentation.R 5d732b0dce5caa77b2901ab38f730398 *R/asJSON.complex.R b3ea8259af7281920005d7b17924eee9 *R/asJSON.data.frame.R 557ff63ba0ec13de5b6f727627ec51f1 *R/asJSON.factor.R 9df01dee05497f6ca358b80169e8f9ae *R/asJSON.function.R b58441d8b3ee56dcf5c05aab05fedf92 *R/asJSON.int64.R 4cd5cff473a75152242c2c0c88e25245 *R/asJSON.json.R 
41e3855be2ae4d4eb0f6578bfe009e0c *R/asJSON.list.R a6072971e1167adb5d077e5d7d9c50ea *R/asJSON.logical.R 328eead4cc7313d02a27b8260203a070 *R/asJSON.numeric.R b376c50c91caddd8a390e256d5ed9b13 *R/asJSON.pairlist.R fa461eada788c9231b8f486d0449c934 *R/asJSON.raw.R ff63b75529d018c9d22309715a6cd04e *R/asJSON.scalar.R 3a9b614527a562cef764817d5b029b62 *R/asJSON.ts.R af33fe6e18b089922504cd706e4c32a4 *R/base64.R 56915a86e78a7780df73a4c32e0cd9ad *R/cleannames.R 6fc02cc465a0cd7276a1426a0d1cb4f2 *R/collapse.R 441c4b477d55d6bf478c3354d5111bb5 *R/collapse_object.R d97f2b53cacf86184caf6959d1e46152 *R/deparse_vector.R 8f4a044a90bf62da1f4233dc49aedf2c *R/fixNativeSymbol.R 22a25cd71893e904b27ec45c16f259ad *R/flatten.R 33f443bbe70eb553a0c7c4c97017dccb *R/fromJSON.R c0ffae30c415067ec42c0c75b5fb43d1 *R/helpfunctions.R ddd8e726241c082235d01da679c08006 *R/is.recordlist.R 359fa06156183f02a19595b59d26ef54 *R/is.scalarlist.R f2a790afb805eab141a57244d7ae2525 *R/list_to_vec.R 2fdf2de3abf1a95c7728bad187a454ae *R/loadpkg.R a11832ba11c041a7278f1ab8baf591a8 *R/makesymbol.R 092b4a52b2af56d68b8e99e4b8cb9b32 *R/null_to_na.R e0379b523ef1d1d5608753796301590b *R/num_to_char.R 510f85cb798a5e7a61f1fba82bdbb221 *R/pack.R ec478d3b9e438dd9b21858fce1a49801 *R/parseJSON.R 0803a4e10e819cabb638d29739b4f7f0 *R/prettify.R bbf0a09478028541352f62ca3c1aa495 *R/print.R 1a41cb6d58b87c61fa7671af0c68e6ef *R/push_parser.R a13262093fcd5fe68cdf1b6db628b5bb *R/raw_to_json.R 584f420223f56249527284bec02eb915 *R/rbind.pages.R 4ea6b630845526cba478d0f6fe048e62 *R/serializeJSON.R 1dd24c1536f68288d0d4dddeb30d5fef *R/simplify.R ba01e3e30193969a46cb9c5ba74cc769 *R/simplifyDataFrame.R d735d795c263b8e9a6400df135a54190 *R/stop.R a03eca64105a2a09e234411a65e5ed4f *R/stream.R 456003c526a090942774cde7fefba02d *R/toJSON.R 241d61276b727fc1e7348dbeab742e47 *R/unbox.R d8505e361886fc0ac03d93b26eda8626 *R/unescape_unicode.R 77465bb5bcda44fec86bbe810150618e *R/utf8conv.R 4b1ef0cb1cd0814f6950b5d1d7ffb4ee *R/validate.R f63dc8cea8e10742970bb44db8bdd70e 
*R/warn_keep_vec_names.R 95f798e237f7baced8dcd900a07f4a76 *build/vignette.rds c96f0a5dcfd55f6e3e13c321629cb4dc *inst/CITATION d02344562338775bfd92f48090f8f623 *inst/doc/json-aaquickstart.R 59736f7a3e0e0f261b921bcea129edba *inst/doc/json-aaquickstart.Rmd 43583af54063abb63a1836a375b41b04 *inst/doc/json-aaquickstart.html 4e99d2b8c7a7580f31c1bf2ba2957a25 *inst/doc/json-apis.Rmd a03498e36b1a92f116b84127fa1b2854 *inst/doc/json-apis.html 719e45223ab9bff85c709ad0e0525d91 *inst/doc/json-mapping.pdf bf707572c3655753491b742bfef2fad0 *inst/doc/json-mapping.pdf.asis 41627461cb61033b6332d711e60761d8 *inst/doc/json-opencpu.R 5577cc27f9fe4d7d86faa3ca88b74f90 *inst/doc/json-opencpu.Rnw c0cc85bb1e3464bfc34ef878960312db *inst/doc/json-opencpu.pdf 738b15230beaf8317aafa65920f692bc *inst/doc/json-paging.Rmd 203f5e8307e41a4e573f219b7ef8bcc7 *inst/doc/json-paging.html b39b11c0272ca25f6818952bff533031 *inst/tests/flatten.R b1bae2329825cb18b0a1d9995714985a *inst/tests/helper-toJSON.R f23023e455638146ad63169f20d36803 *inst/tests/issues.txt 71ea8e4256bf3294c409c881681a934d *inst/tests/readme.txt 4cb91a98ddd47a10d233678ab5989d1a *inst/tests/test-fromJSON-NA-values.R c9f19e48a92535e5532f996cbbac02d0 *inst/tests/test-fromJSON-array.R 27be9afc0fa79345ad41d8ad53aa7200 *inst/tests/test-fromJSON-dataframe.R aa7791160baad2fd7fb3597ed808348a *inst/tests/test-fromJSON-datasets.R d75eee473a26e82255e677ba26520784 *inst/tests/test-fromJSON-date.R c6b5d8bad601bfe421a138faf26b7658 *inst/tests/test-fromJSON-matrix.R e237a83753f070f71421b860b91e7e72 *inst/tests/test-libjson-escaping.R cdc6699814eaee6709c09f96e965eace *inst/tests/test-libjson-large.R 4ca0c2309ec1ace0dfdefc9a1285df56 *inst/tests/test-libjson-utf8.R 5397e2ea1e806a04fb51e4eb59d34fd7 *inst/tests/test-libjson-validator.R 361c49618cf3486bd5437dd1bb41971f *inst/tests/test-network-Github.R cd499599d5494ca0fc5800f511409f8a *inst/tests/test-serializeJSON-datasets.R aeb2168457555594252b099a76bcaa2c *inst/tests/test-serializeJSON-functions.R 
97e85447387747898b1ea5840f53c81a *inst/tests/test-serializeJSON-types.R b84e60041fc128cb5170f10e577c9ad5 *inst/tests/test-toJSON-AsIs.R eb4a97b650ff522419054f5ffaf71b5d *inst/tests/test-toJSON-Date.R 45420c7ed3efa57e9bae8a45784c7b02 *inst/tests/test-toJSON-NA-values.R ff5f8b6ee8ec115226bb319131e4b361 *inst/tests/test-toJSON-NULL-values.R 4712592e3c1bc94ca3a1c67e5c253242 *inst/tests/test-toJSON-POSIXt.R c9de8f6eb487ce445780eb3fbbf0209e *inst/tests/test-toJSON-complex.R 928cfa5544be5c01a6e91983f2e83e34 *inst/tests/test-toJSON-dataframe.R 83355d4d1aa22a0616da31644b30fa7d *inst/tests/test-toJSON-factor.R 6319bd28125018c6b955b284de617dec *inst/tests/test-toJSON-keep-vec-names.R 5a7f74f2f51703cdae5eed433b4ed5d4 *inst/tests/test-toJSON-logical.R fb28c7dc5dbd33ed9c9f4cb6d2d7ab01 *inst/tests/test-toJSON-matrix.R 0759a0a27f2346bb29cb13179ca5759d *inst/tests/test-toJSON-numeric.R b67ddf907b7eda8835e59b0cf944f1b3 *inst/tests/test-toJSON-raw.R bdad5ec4e8cd10c38cf233a8b1305daa *inst/tests/test-toJSON-zerovec.R 609172b33786e8d3ae0dab8450d21a0a *inst/tests/testS4.R e635dae3629bdc8929ad6333fb490933 *man/flatten.Rd 0f62f6e6c6685865bddaa8fc72efa0dc *man/fromJSON.Rd c11fc6155ec81aa0328ded58d25f9c21 *man/prettify.Rd a62c8f16af864d890e437afbec5b485b *man/rbind.pages.Rd 5ffee26ad9e5d035f91677488f3d088c *man/serializeJSON.Rd 29d3fc427815a7ea1c30d6f011a5cab4 *man/stream_in.Rd b061d1c30f4dc301d0dfc28cf98f1e8a *man/unbox.Rd 70dd43a31bfbdacef6f6688f48d02cba *man/validate.Rd 36086dda4d4fecd925d8e45f9805d25a *src/Makevars 766384995d24d4939dae31e5d955a3bb *src/base64.c 4dc5aee3f5803a9c71315f2d68e0d1b5 *src/base64.h faea98fda8fdbb6f78697710542c0fba *src/collapse_array.c 0ae4cfc11ff20bd4930f6b106d21ba7f *src/collapse_object.c 0549413f578a2afa06df08984092ce4d *src/collapse_pretty.c b8327031bf8e32e0ae0d18b53deb1990 *src/escape_chars.c f5b084b6a0394dcfb37917435d2480f3 *src/integer64_to_na.c ba977a53aa8e0ebd1bab8d9ba4c3276b *src/is_recordlist.c 09701a7eb31c40523d58cb06ecaba86b *src/is_scalarlist.c 
2efe63deda72cbbd3ced4181899f214a *src/modp_numtoa.c a1a97f472d00c7f705a84e96feb7d352 *src/modp_numtoa.h b1d3deee3a4ba2960d857892991ad682 *src/null_to_na.c 1664105ca329631102383dadd72dec79 *src/num_to_char.c 5bb8438acc3c958a2724526e4a6674c2 *src/parse.c ca491c7e763e61bf82e17b66966ecc78 *src/prettify.c 8becd2a5c39b0f1705dc432f50d2ee87 *src/push_parser.c 4d8d6fb9043e7306cc526f267183313c *src/push_parser.h cd345103e29145d011bbcfdc3b5b1fa2 *src/row_collapse.c 53dd437fe1e446226980a8e3c2f24c8a *src/validate.c 82090dc44b5da981b6b39729bbd01b30 *src/yajl/api/yajl_common.h 8d59219a0f3e35778495a65c055c73f1 *src/yajl/api/yajl_gen.h 95792072fd28bbb37dfd4fb25f9ce893 *src/yajl/api/yajl_parse.h c1951a11b41bcd2f6fe97072fee6e182 *src/yajl/api/yajl_tree.h af5ebc5fff57f84064c2bc5c79420101 *src/yajl/api/yajl_version.h 9a58fd6f1c6be2244023dabcc77447d8 *src/yajl/readme.txt 6e750b3ec74e85174ed799cc944e931e *src/yajl/yajl.c 2be6b0133f8dc6190771e13b71eb0fda *src/yajl/yajl_alloc.c c283775f8a3dad48d89b6366588e1c6c *src/yajl/yajl_alloc.h 184cf783918db8355b385990f5bfbd27 *src/yajl/yajl_buf.c 6bf3dce93b04e488416f10bad1d37dd6 *src/yajl/yajl_buf.h 9666a608f17725d307cb6723a273ac3b *src/yajl/yajl_bytestack.h 22b4abce2656c3db32371fa2df1e256d *src/yajl/yajl_encode.c 0ddd919c7a1b2593b2cc2cdd41285aaf *src/yajl/yajl_encode.h 23f9d3424764408da72043650700144f *src/yajl/yajl_gen.c 0f011605c67b70bd5f54b2aeacc78e55 *src/yajl/yajl_lex.c 3ddc5742fd9bde4cc387bbae35c01236 *src/yajl/yajl_lex.h c3b70695f8a1225a9457a501e201c4f7 *src/yajl/yajl_parser.c 3a27147e92286b52862bf17665eda478 *src/yajl/yajl_parser.h 084ecc84d399e3a2b2227331ea145c7b *src/yajl/yajl_tree.c ab3de20370cc57144f8e1b449a3e2ab2 *src/yajl/yajl_version.c 669d289a39b2e8af6c90e562d5213d11 *tests/run-all.R 59736f7a3e0e0f261b921bcea129edba *vignettes/json-aaquickstart.Rmd 4e99d2b8c7a7580f31c1bf2ba2957a25 *vignettes/json-apis.Rmd b7f884b8f9b4306cf7af157808ec85cb *vignettes/json-apis.Rmd.orig 0e019ff3da3b7378d6f356311bf8d839 *vignettes/json-mapping.Rnw.orig 
bf707572c3655753491b742bfef2fad0 *vignettes/json-mapping.pdf.asis 5577cc27f9fe4d7d86faa3ca88b74f90 *vignettes/json-opencpu.Rnw 738b15230beaf8317aafa65920f692bc *vignettes/json-paging.Rmd c901571d19813378301caf291bf0f463 *vignettes/json-paging.Rmd.orig 3482b70f8858142fbce1f1f65310a696 *vignettes/precompile.R bd5d57d6cc98bc3ae5e157fd8eaaff2b *vignettes/references.bib jsonlite/build/0000755000176200001440000000000012626133701013201 5ustar liggesusersjsonlite/build/vignette.rds0000644000176200001440000000064212626133701015542 0ustar liggesusersS[O0N}򡾘h.d`k:(l]݊ool amw~ֽ4B*2Jp,uaׁʨ2 %!kf"IBiZS( skq.OHqM58̀:8fr&'L IاC*EBOacj2Žm10!_\IcO&ݲqKAU*Ϩ)$] %e^/)o mvr2oB "Vaƃ:o\csE&@N|_:C{);#@YPO~O ;fYH3`3y3}g>Zjsonlite/DESCRIPTION0000644000176200001440000000255612626264003013620 0ustar liggesusersPackage: jsonlite Version: 0.9.19 Title: A Robust, High Performance JSON Parser and Generator for R License: MIT + file LICENSE NeedsCompilation: yes Depends: methods Author: Jeroen Ooms, Duncan Temple Lang, Lloyd Hilaiel URL: http://arxiv.org/abs/1403.2805, https://www.opencpu.org/posts/jsonlite-a-smarter-json-encoder BugReports: http://github.com/jeroenooms/jsonlite/issues Maintainer: Jeroen Ooms VignetteBuilder: knitr, R.rsp Description: A fast JSON parser and generator optimized for statistical data and the web. Started out as a fork of 'RJSONIO', but has been completely rewritten in recent versions. The package offers flexible, robust, high performance tools for working with JSON in R and is particularly powerful for building pipelines and interacting with a web API. The implementation is based on the mapping described in the vignette (Ooms, 2014). In addition to converting JSON data from/to R objects, 'jsonlite' contains functions to stream, validate, and prettify JSON data. The unit tests included with the package verify that all edge cases are encoded and decoded consistently for use with dynamic data in systems and applications. 
Suggests: curl (>= 0.5), plyr, testthat, knitr, rmarkdown, R.rsp Packaged: 2015-11-27 20:06:25 UTC; jeroen Repository: CRAN Date/Publication: 2015-11-28 09:38:27 jsonlite/man/0000755000176200001440000000000012571554350012663 5ustar liggesusersjsonlite/man/unbox.Rd0000644000176200001440000000277112540777273014323 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/unbox.R \name{unbox} \alias{unbox} \title{Unbox a vector or data frame} \usage{ unbox(x) } \arguments{ \item{x}{atomic vector of length 1, or data frame with 1 row.} } \value{ Returns a singleton version of \code{x}. } \description{ This function marks an atomic vector or data frame as a \href{http://en.wikipedia.org/wiki/Singleton_(mathematics)}{singleton}, i.e. a set with exactly 1 element. Thereby, the value will not turn into an \code{array} when encoded into JSON. This can only be done for atomic vectors of length 1, or data frames with exactly 1 row. To automatically unbox all vectors of length 1 within an object, use the \code{auto_unbox} argument in \code{\link{toJSON}}. } \details{ It is usually recommended to avoid this function and stick with the default encoding schema for the various \R{} classes. The only use case for this function is if you are bound to some specific predefined JSON structure (e.g. to submit to an API), which has no natural \R{} representation. Note that the default encoding for data frames naturally results in a collection of key-value pairs, without using \code{unbox}. 
} \examples{ toJSON(list(foo=123)) toJSON(list(foo=unbox(123))) # Auto unbox vectors of length one: x = list(x=1:3, y = 4, z = "foo", k = NULL) toJSON(x) toJSON(x, auto_unbox = TRUE) x <- iris[1,] toJSON(list(rec=x)) toJSON(list(rec=unbox(x))) } \references{ \url{http://en.wikipedia.org/wiki/Singleton_(mathematics)} } jsonlite/man/prettify.Rd0000644000176200001440000000102512540777273015025 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/prettify.R \name{prettify, minify} \alias{minify} \alias{prettify} \alias{prettify, minify} \title{Prettify or minify a JSON string} \usage{ prettify(txt, indent = 4) minify(txt) } \arguments{ \item{txt}{JSON string} \item{indent}{number of spaces to indent} } \description{ Prettify adds indentation to a JSON string; minify removes all indentation/whitespace. } \examples{ myjson <- toJSON(cars) cat(myjson) prettify(myjson) minify(myjson) } jsonlite/man/fromJSON.Rd0000644000176200001440000001207412626136411014606 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/fromJSON.R, R/toJSON.R \name{toJSON, fromJSON} \alias{fromJSON} \alias{jsonlite} \alias{toJSON} \alias{toJSON, fromJSON} \title{Convert \R{} objects to/from JSON} \usage{ fromJSON(txt, simplifyVector = TRUE, simplifyDataFrame = simplifyVector, simplifyMatrix = simplifyVector, flatten = FALSE, ...) toJSON(x, dataframe = c("rows", "columns", "values"), matrix = c("rowmajor", "columnmajor"), Date = c("ISO8601", "epoch"), POSIXt = c("string", "ISO8601", "epoch", "mongo"), factor = c("string", "integer"), complex = c("string", "list"), raw = c("base64", "hex", "mongo"), null = c("list", "null"), na = c("null", "string"), auto_unbox = FALSE, digits = 4, pretty = FALSE, force = FALSE, ...) 
} \arguments{ \item{txt}{a JSON string, URL or file} \item{simplifyVector}{coerce JSON arrays containing only primitives into an atomic vector} \item{simplifyDataFrame}{coerce JSON arrays containing only records (JSON objects) into a data frame} \item{simplifyMatrix}{coerce JSON arrays containing vectors of equal mode and dimension into matrix or array} \item{flatten}{automatically \code{\link{flatten}} nested data frames into a single non-nested data frame} \item{...}{arguments passed on to class specific \code{print} methods} \item{x}{the object to be encoded} \item{dataframe}{how to encode data.frame objects: must be one of 'rows', 'columns' or 'values'} \item{matrix}{how to encode matrices and higher dimensional arrays: must be one of 'rowmajor' or 'columnmajor'.} \item{Date}{how to encode Date objects: must be one of 'ISO8601' or 'epoch'} \item{POSIXt}{how to encode POSIXt (datetime) objects: must be one of 'string', 'ISO8601', 'epoch' or 'mongo'} \item{factor}{how to encode factor objects: must be one of 'string' or 'integer'} \item{complex}{how to encode complex numbers: must be one of 'string' or 'list'} \item{raw}{how to encode raw objects: must be one of 'base64', 'hex' or 'mongo'} \item{null}{how to encode NULL values within a list: must be one of 'null' or 'list'} \item{na}{how to print NA values: must be one of 'null' or 'string'. Defaults are class specific} \item{auto_unbox}{automatically \code{\link{unbox}} all atomic vectors of length 1. It is usually safer to avoid this and instead use the \code{\link{unbox}} function to unbox individual elements. An exception is that objects of class \code{AsIs} (i.e. wrapped in \code{I()}) are not automatically unboxed. This is a way to mark single values as length-1 arrays.} \item{digits}{max number of decimal digits to print for numeric values. Use \code{I()} to specify significant digits.} \item{pretty}{adds indentation whitespace to JSON output. 
Can be TRUE/FALSE or a number specifying the number of spaces to indent. See \code{\link{prettify}}} \item{force}{unclass/skip objects of classes with no defined JSON mapping} } \description{ These functions are used to convert between JSON data and \R{} objects. The \code{\link{toJSON}} and \code{\link{fromJSON}} functions use a class based mapping, which follows conventions outlined in this paper: \url{http://arxiv.org/abs/1403.2805} (also available as vignette). } \details{ The \code{\link{toJSON}} and \code{\link{fromJSON}} functions are drop-in replacements for the identically named functions in packages \code{rjson} and \code{RJSONIO}. Our implementation uses an alternative, somewhat more consistent mapping between \R{} objects and JSON strings. The \code{\link{serializeJSON}} and \code{\link{unserializeJSON}} functions in this package use an alternative system to convert between \R{} objects and JSON, which supports more classes but is much more verbose. A JSON string is always unicode, using \code{UTF-8} by default, hence there is usually no need to escape any characters. However, the JSON format does support escaping of unicode characters, which are encoded using a backslash followed by a lower case \code{"u"} and 4 hex characters, for example: \code{"Z\\u00FCrich"}. The \code{fromJSON} function will parse such escape sequences but it is usually preferable to encode unicode characters in JSON using native \code{UTF-8} rather than escape sequences. 
} \examples{ # Stringify some data jsoncars <- toJSON(mtcars, pretty=TRUE) cat(jsoncars) # Parse it back fromJSON(jsoncars) # Parse escaped unicode fromJSON('{"city" : "Z\\\\u00FCrich"}') # Decimal vs significant digits toJSON(pi, digits=3) toJSON(pi, digits=I(3)) \dontrun{# retrieve data frame data1 <- fromJSON("https://api.github.com/users/hadley/orgs") names(data1) data1$login # Nested data frames: data2 <- fromJSON("https://api.github.com/users/hadley/repos") names(data2) names(data2$owner) data2$owner$login # Flatten the data into a regular non-nested dataframe names(flatten(data2)) # Flatten directly (more efficient): data3 <- fromJSON("https://api.github.com/users/hadley/repos", flatten = TRUE) identical(data3, flatten(data2)) } } \references{ Jeroen Ooms (2014). The \code{jsonlite} Package: A Practical and Consistent Mapping Between JSON Data and \R{} Objects. \emph{arXiv:1403.2805}. \url{http://arxiv.org/abs/1403.2805} } jsonlite/man/validate.Rd0000644000176200001440000000103512540777273014751 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/validate.R \name{validate} \alias{validate} \title{Validate JSON} \usage{ validate(txt) } \arguments{ \item{txt}{JSON string} } \description{ Test if a string contains valid JSON. Character vectors will be collapsed into a single string. 
} \examples{ #Output from toJSON and serializeJSON should pass validation myjson <- toJSON(mtcars) validate(myjson) #TRUE #Something bad happened truncated <- substring(myjson, 1, 100) validate(truncated) #FALSE } jsonlite/man/rbind.pages.Rd0000644000176200001440000000321312540777273015354 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/rbind.pages.R \name{rbind.pages} \alias{rbind.pages} \title{Combine pages into a single data frame} \usage{ rbind.pages(pages) } \arguments{ \item{pages}{a list of data frames, each representing a \emph{page} of data} } \description{ The \code{rbind.pages} function is used to combine a list of data frames into a single data frame. This is often needed when working with a JSON API that limits the amount of data per request. If we need more data than what fits in a single request, we need to perform multiple requests that each retrieve a fragment of data, not unlike pages in a book. In practice this is often implemented using a \code{page} parameter in the API. The \code{rbind.pages} function can be used to combine these pages back into a single dataset. } \details{ The \code{\link{rbind.pages}} function generalizes \code{\link[base:rbind]{base::rbind}} and \code{\link[plyr:rbind.fill]{plyr::rbind.fill}} with added support for nested data frames. Not every column has to be present in each of the individual data frames; missing columns will be filled with \code{NA} values. 
} \examples{ # Basic example x <- data.frame(foo = rnorm(3), bar = c(TRUE, FALSE, TRUE)) y <- data.frame(foo = rnorm(2), col = c("blue", "red")) rbind.pages(list(x, y)) \dontrun{ baseurl <- "http://projects.propublica.org/nonprofits/api/v1/search.json" pages <- list() for(i in 0:20){ mydata <- fromJSON(paste0(baseurl, "?order=revenue&sort_order=desc&page=", i)) message("Retrieving page ", i) pages[[i+1]] <- mydata$filings } filings <- rbind.pages(pages) nrow(filings) colnames(filings) } } jsonlite/man/flatten.Rd0000644000176200001440000000214312540777273014616 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/flatten.R \name{flatten} \alias{flatten} \title{Flatten nested data frames} \usage{ flatten(x, recursive = TRUE) } \arguments{ \item{x}{a data frame} \item{recursive}{flatten recursively} } \description{ In a nested data frame, one or more of the columns consist of another data frame. These structures frequently appear when parsing JSON data from the web. We can flatten such data frames into a regular 2 dimensional tabular structure. 
} \examples{ options(stringsAsFactors=FALSE) x <- data.frame(driver = c("Bowser", "Peach"), occupation = c("Koopa", "Princess")) x$vehicle <- data.frame(model = c("Piranha Prowler", "Royal Racer")) x$vehicle$stats <- data.frame(speed = c(55, 34), weight = c(67, 24), drift = c(35, 32)) str(x) str(flatten(x)) str(flatten(x, recursive = FALSE)) \dontrun{ data1 <- fromJSON("https://api.github.com/users/hadley/repos") colnames(data1) colnames(data1$owner) colnames(flatten(data1)) # or for short: data2 <- fromJSON("https://api.github.com/users/hadley/repos", flatten = TRUE) colnames(data2) } } jsonlite/man/serializeJSON.Rd0000644000176200001440000000342512540777273015646 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/serializeJSON.R \name{serializeJSON} \alias{serializeJSON} \alias{unserializeJSON} \title{serialize R objects to JSON} \usage{ serializeJSON(x, digits = 8, pretty = FALSE) unserializeJSON(txt) } \arguments{ \item{x}{an \R{} object to be serialized} \item{digits}{max number of digits (after the dot) to print for numeric values} \item{pretty}{add indentation/whitespace to JSON output. See \code{\link{prettify}}} \item{txt}{a JSON string which was created using \code{serializeJSON}} } \description{ The \code{\link{serializeJSON}} and \code{\link{unserializeJSON}} functions convert between \R{} objects and JSON data. Instead of using a class based mapping like \code{\link{toJSON}} and \code{\link{fromJSON}}, the serialize functions base the encoding schema on the storage type, and capture all data and attributes from any object. Thereby the object can be restored almost perfectly from its JSON representation, but the resulting JSON output is very verbose. Apart from environments, all standard storage types are supported. } \note{ JSON is a text based format which leads to loss of precision when printing numbers. 
} \examples{ jsoncars <- serializeJSON(mtcars) mtcars2 <- unserializeJSON(jsoncars) identical(mtcars, mtcars2) set.seed(123) myobject <- list( mynull = NULL, mycomplex = lapply(eigen(matrix(-rnorm(9),3)), round, 3), mymatrix = round(matrix(rnorm(9), 3),3), myint = as.integer(c(1,2,3)), mydf = cars, mylist = list(foo='bar', 123, NA, NULL, list('test')), mylogical = c(TRUE,FALSE,NA), mychar = c('foo', NA, 'bar'), somemissings = c(1,2,NA,NaN,5, Inf, 7, -Inf, 9, NA), myrawvec = charToRaw('This is a test') ); identical(unserializeJSON(serializeJSON(myobject)), myobject); } jsonlite/man/stream_in.Rd0000644000176200001440000001356112621624030015126 0ustar liggesusers% Generated by roxygen2 (4.1.1): do not edit by hand % Please edit documentation in R/stream.R \name{stream_in, stream_out} \alias{stream_in} \alias{stream_in, stream_out} \alias{stream_out} \title{Streaming JSON input/output} \usage{ stream_in(con, handler = NULL, pagesize = 500, verbose = TRUE, ...) stream_out(x, con = stdout(), pagesize = 500, verbose = TRUE, ...) } \arguments{ \item{con}{a \code{\link{connection}} object. If the connection is not open, \code{stream_in} and \code{stream_out} will automatically open and later close (and destroy) the connection. See details.} \item{handler}{a custom function that is called on each page of JSON data. If not specified, the default handler stores all pages and binds them into a single data frame that will be returned by \code{stream_in}. See details.} \item{pagesize}{number of lines to read/write from/to the connection per iteration.} \item{verbose}{print some information on what is going on.} \item{...}{arguments for \code{\link{fromJSON}} and \code{\link{toJSON}} that control JSON formatting/parsing where applicable. Use with caution.} \item{x}{object to be streamed out. Currently only data frames are supported.} } \value{ The \code{stream_out} function always returns \code{NULL}. 
When no custom handler is specified, \code{stream_in} returns a data frame of all pages bound together. When a custom handler function is specified, \code{stream_in} always returns \code{NULL}. } \description{ The \code{stream_in} and \code{stream_out} functions implement line-by-line processing of JSON data over a \code{\link{connection}}, such as a socket, url, file or pipe. JSON streaming requires the \href{http://ndjson.org}{ndjson} format, which slightly differs from \code{\link{fromJSON}} and \code{\link{toJSON}}, see details. } \details{ Because parsing huge JSON strings is difficult and inefficient, JSON streaming is done using \strong{lines of minified JSON records}, a.k.a. \href{http://ndjson.org}{ndjson}. This is pretty standard: JSON databases such as \href{https://github.com/maxogden/dat}{dat} or MongoDB use the same format to import/export datasets. Note that this means that the total stream combined is not valid JSON itself; only the individual lines are. Also note that because line-breaks are used as separators, prettified JSON is not permitted: the JSON lines \emph{must} be minified. In this respect, the format is a bit different from \code{\link{fromJSON}} and \code{\link{toJSON}} where all lines are part of a single JSON structure with optional line breaks. The \code{handler} is a callback function which is called for each page (batch) of JSON data with exactly one argument (usually a data frame with \code{pagesize} rows). If \code{handler} is missing or \code{NULL}, a default handler is used which stores all intermediate pages of data, and at the very end binds all pages together into one single data frame that is returned by \code{stream_in}. When a custom \code{handler} function is specified, \code{stream_in} does not store any intermediate results and always returns \code{NULL}. It is then up to the \code{handler} to process or store data pages. 
A \code{handler} function that does not store intermediate results in memory (for example by writing output to another connection) results in a pipeline that can process an unlimited amount of data. See example. If a connection is not opened yet, \code{stream_in} and \code{stream_out} will automatically open and later close the connection. Because R destroys connections when they are closed, they cannot be reused. To use a single connection for multiple calls to \code{stream_in} or \code{stream_out}, it needs to be opened beforehand. See example. } \examples{ # compare formats x <- iris[1:3,] toJSON(x) stream_out(x) # Trivial example mydata <- stream_in(url("http://httpbin.org/stream/100")) \dontrun{# stream large dataset to file and back library(nycflights13) stream_out(flights, file(tmp <- tempfile())) flights2 <- stream_in(file(tmp)) unlink(tmp) all.equal(flights2, as.data.frame(flights)) # stream over HTTP diamonds2 <- stream_in(url("http://jeroenooms.github.io/data/diamonds.json")) # stream over HTTP with gzip compression flights3 <- stream_in(gzcon(url("http://jeroenooms.github.io/data/nycflights13.json.gz"))) all.equal(flights3, as.data.frame(flights)) # stream over HTTPS (HTTP+SSL) via curl library(curl) flights4 <- stream_in(gzcon(curl("https://jeroenooms.github.io/data/nycflights13.json.gz"))) all.equal(flights4, as.data.frame(flights)) # or alternatively: flights5 <- stream_in(gzcon(pipe("curl https://jeroenooms.github.io/data/nycflights13.json.gz"))) all.equal(flights5, as.data.frame(flights)) # Full JSON IO stream from URL to file connection. 
# Calculate delays for flights over 1000 miles in batches of 5k library(dplyr) con_in <- gzcon(url("http://jeroenooms.github.io/data/nycflights13.json.gz")) con_out <- file(tmp <- tempfile(), open = "wb") stream_in(con_in, handler = function(df){ df <- dplyr::filter(df, distance > 1000) df <- dplyr::mutate(df, delta = dep_delay - arr_delay) stream_out(df, con_out, pagesize = 1000) }, pagesize = 5000) close(con_out) # stream it back in mydata <- stream_in(file(tmp)) nrow(mydata) unlink(tmp) # Data from http://openweathermap.org/current#bulk # Each row contains a nested data frame. daily14 <- stream_in(gzcon(url("http://78.46.48.103/sample/daily_14.json.gz")), pagesize=50) subset(daily14, city$name == "Berlin")$data[[1]] # Or with dplyr: library(dplyr) daily14f <- flatten(daily14) filter(daily14f, city.name == "Berlin")$data[[1]] # Stream import large data from zip file tmp <- tempfile() download.file("http://jsonstudio.com/wp-content/uploads/2014/02/companies.zip", tmp) companies <- stream_in(unz(tmp, "companies.json")) } } \references{ MongoDB export format: \url{http://docs.mongodb.org/manual/reference/program/mongoexport/#cmdoption--query} Documentation for the JSON Lines text file format: \url{http://jsonlines.org/} } jsonlite/LICENSE0000644000176200001440000000005112540777273013120 0ustar liggesusersYEAR: 2015 COPYRIGHT HOLDER: Jeroen Ooms