CFtime/ 0000755 0001762 0000144 00000000000 14761426557 011442 5 ustar ligges users CFtime/tests/ 0000755 0001762 0000144 00000000000 14463144520 012566 5 ustar ligges users CFtime/tests/testthat/ 0000755 0001762 0000144 00000000000 14761426557 014444 5 ustar ligges users CFtime/tests/testthat/test-CFbounds.R 0000644 0001762 0000144 00000004717 14726267600 017251 0 ustar ligges users test_that("bounds works", { t <- CFtime("days since 2024-01-01", "standard") off <- seq(from = 0.5, by = 1, length.out = 10) bnds <- rbind(0:9, 1:10) expect_null(bounds(t)) # no offsets expect_error(bounds(t) <- bnds) # t <- t + off expect_null(bounds(t)) # bounds not set bounds(t) <- TRUE expect_equal(bounds(t), bnds) expect_equal(bounds(t, "%Y-%m-%d")[2,1:3], c("2024-01-02", "2024-01-03", "2024-01-04")) bounds(t) <- FALSE expect_null(bounds(t)) expect_error(bounds(t) <- matrix(1:12, nrow = 4)) expect_error(bounds(t) <- "plain wrong") expect_error(bounds(t) <- bnds * 10) bounds(t) <- bnds expect_match(capture_output(t$print()), "Bounds : regular and consecutive$") hr6 <- rbind(off - 0.25, off + 0.25) bounds(t) <- hr6 expect_match(capture_output(t$print()), "Bounds : irregular$") expect_equal(bounds(t), hr6) expect_equal(bounds(t, "%H")[,1], c("06", "18")) }) test_that("indexOf() works", { off <- (23*360):(24*360-1) + 0.5 # year 2024 days t <- CFtime("days since 2001-01-01", "360_day", off) x <- c("1999-02-12", # pre-origin "2023-06-23", # pre-time series "2024-01-30", "2024-01-31", # non-existent date "2024-02-01", "2024-02-30", "2024-03-01", "2025-01-01") # post-time series expect_error(indexOf(x, t, method = 4)) expect_error(indexOf(TRUE, t)) expect_error(indexOf(x, CFtime("months since 2001-01-01", "standard", 0:23))) expect_error(indexOf(x, t, nomatch = "July")) # must be able to coerce to numeric expect_equal(indexOf(x, t)[1:8], c(0, 0, 29, NA, 30, 59, 60, .Machine$integer.max)) expect_equal(indexOf(x, t, method = "linear")[1:8], c(0, 0, 29.5, NA, 30.5, 59.5, 60.5, .Machine$integer.max)) n <- 1:3 out <- indexOf(n, t) outt <- attr(out, "CFTime") expect_equal(as_timestamp(t)[1:3], as_timestamp(outt)) n <- c(-1, -2, -3) out <- indexOf(n, t) outt <- attr(out, "CFTime") expect_equal(length(outt), 360 - 3) expect_equal(as_timestamp(t)[4:6], as_timestamp(outt)[1:3]) expect_error(indexOf(c(-1, 1), t)) # Attached CFtime must have valid timestamps in `x` out <- indexOf(x, t) outt <- attr(out, "CFTime") expect_equal(as_timestamp(outt), x[!is.na(out) & out > 0 & out < .Machine$integer.max]) bounds(t) <- TRUE expect_equal(indexOf(x, t)[1:8], c(0, 0, 30, NA, 31, 60, 61, .Machine$integer.max)) expect_equal(indexOf(x, t, method = "linear")[1:8], c(0, 0, 30, NA, 31, 60, 61, .Machine$integer.max)) }) CFtime/tests/testthat/test-CFformat.R 0000644 0001762 0000144 00000030476 14760032757 017251 0 ustar ligges users test_that("Creating timestamps", { expect_error(as_timestamp("1-2-3")) # No CFtime as first argument cf <- CFtime("hours since 2001-01-01", "365_day") expect_null(as_timestamp(cf)) # No offsets cf <- cf + 0L:2399L expect_error(as_timestamp(cf, "d")) # Wrong format specifier expect_error(as_timestamp(cf, asPOSIX = T)) # No POSIXt on a non-standard calendar expect_equal(length(as_timestamp(cf)), 2400L) cf <- CFtime("days since 2001-01-01", "standard", 0L:364L) expect_equal(length(as_timestamp(cf)), 365L) expect_equal(nchar(as_timestamp(cf)[1]), 10L) # date string expect_equal(length(as_timestamp(cf, "date", TRUE)), 365L) expect_equal(length(as_timestamp(cf, "timestamp", TRUE)), 365L) }) test_that("Using format()", { 
cf <- CFTime$new("days since 2001-01-01 18:10:30-04", "365_day") expect_equal(length(cf$format()), 0L) # no offsets cf <- cf + 0:364 expect_equal(format(cf)[1], "2001-01-01 18:10:30") # format parameter missing expect_error(format(cf, 123)) # format parameter must be character expect_error(format(cf, c("doesn't", "work", "either"))) expect_equal(format(cf, "Timestamp is: %%%F%%")[1], "Timestamp is: %2001-01-01%") expect_equal(format(cf, "Timestamp is: %R")[1], "Timestamp is: 18:10") expect_equal(format(cf, "%T%z")[1], "18:10:30-0400") #expect_equal(format(cf, "%b")[c(1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335)], month.abb) # en_EN only #expect_equal(format(cf, "%B")[c(1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335)], month.name) # expect_equal(format(cf, "%Od-%e-%I%p")[5], "05- 5-06PM") }) test_that("CFfactor testing", { # No offsets cf <- CFtime("days since 2000-01-01", "365_day") expect_error(CFfactor()) expect_error(CFfactor(cf)) expect_error(CFfactor(cf, "zxcv")) expect_error(CFfactor(cf, c("day", "month"))) expect_error(CFfactor(cf, "hour")) expect_error(CFfactor(cf, "month", "bad")) month_days <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) leap_month_days <- c(31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) dekad_days <- c(10, 10, 11, 10, 10, 8, 10, 10, 11, 10, 10, 10, 10, 10, 11, 10, 10, 10, 10, 10, 11, 10, 10, 11, 10, 10, 10, 10, 10, 11, 10, 10, 10, 10, 10, 11) leap_dekad_days <- c(10, 10, 11, 10, 10, 9, 10, 10, 11, 10, 10, 10, 10, 10, 11, 10, 10, 10, 10, 10, 11, 10, 10, 11, 10, 10, 10, 10, 10, 11, 10, 10, 10, 10, 10, 11) # Few offsets cf <- cf + 365L:370L expect_error(CFfactor(cf)) cf <- cf + 371L:7664L # 20 years of offsets # Regular factors for all available periods np <- c(20, 81, 80, 240, 720, 7300) first <- c("2001", "2001S1", "2001Q1", "2001-01", "2001D01", "2001-01-01") last <- c("2020", "2021S1", "2020Q4", "2020-12", "2020D36", "2020-12-31") for (p in 1:6) { f <- CFfactor(cf, CFt$factor_periods[p]) expect_equal(as.character(f)[1L], first[p]) expect_equal(as.character(f)[7300L], last[p]) newcf <- attr(f, "CFTime") bnds <- bounds(newcf) expect_equal(definition(cf), definition(newcf)) expect_true(is.matrix(bnds)) expect_type(bnds, "double") expect_equal(dim(bnds), c(2, np[p])) } # Era factors for all available periods eras <- list(first = 2001L, double = 2002L:2003L, final3 = 2018L:2020L, outside = 2022L) lvls <- c(1L, 4L, 4L, 12L, 36L, 365L) expect_error(cf$factor("month", "bad")) for (p in 1:6) { # year, season, quarter, month, dekad, day f <- CFfactor(cf, CFt$factor_periods[p], eras) expect_type(f, "list") expect_equal(length(f), 4L) expect_equal(length(f$first), 7300L) expect_equal(attr(f$first, "period"), CFt$factor_periods[p]) expect_equal(attr(f$first, "era"), 1L) expect_null(attr(f$first, "zxcv")) if (p == 1L) { expect_equal(length(levels(f$first)), 1L) expect_equal(length(levels(f$double)), 2L) expect_equal(length(levels(f$final3)), 3L) expect_equal(length(levels(f$outside)), 0L) } else if (p %in% c(2L, 3L)) { expect_equal(length(levels(f$first)), 4L) expect_equal(length(levels(f$double)), 4L) expect_equal(length(levels(f$final3)), 4L) expect_equal(length(levels(f$outside)), 0L) } else { expect_equal(length(levels(f$first)), lvls[p]) expect_equal(length(levels(f$double)), lvls[p]) expect_equal(length(levels(f$final3)), lvls[p]) expect_equal(length(levels(f$outside)), 0L) } } # Single era value for all available periods for (p in 1:6) { # year, season, quarter, month, dekad, day f <- CFfactor(cf, CFt$factor_periods[p], 2002L) expect_s3_class(f, 
"factor") expect_equal(length(f), 7300L) expect_equal(length(levels(f)), lvls[p]) expect_equal(attr(f, "period"), CFt$factor_periods[p]) expect_equal(attr(f, "era"), 1L) expect_null(attr(f, "zxcv")) } # Units and coverage in factor levels expect_error(CFfactor_units("zxcv")) expect_error(CFfactor_units(cf, "zxcv")) expect_error((CFfactor_units(cf, list(12)))) expect_error((CFfactor_units(cf, factor(letters)))) expect_error((CFfactor_units(cf, list(factor(letters))))) f <- CFfactor(cf, "year") expect_true(all(CFfactor_units(cf, f) == 365L)) expect_true(all(CFfactor_coverage(cf, f, "absolute") == 365L)) expect_true(all(CFfactor_coverage(cf, f, "relative") == 1L)) f <- CFfactor(cf, "season") expect_equal(sum(CFfactor_units(cf, f)), 7390L) # 20 yrs * 365 plus 90 bc DJF season present on both ends expect_true(all(CFfactor_units(cf, f) %in% 90L:92L)) x <- CFfactor_coverage(cf, f, "absolute") expect_equal(x[1L], 59L) # Jan + Feb of first year expect_equal(x[81L], 31L) # Dec of last year expect_true(all(x[2L:80L] %in% 90L:92L)) x <- CFfactor_coverage(cf, f, "relative") expect_equal(x[1L] + x[81L], 1L) expect_true(all(x[2L:80L] == 1L)) f <- CFfactor(cf, "quarter") expect_equal(sum(CFfactor_units(cf, f)), 7300L) expect_true(all(CFfactor_units(cf, f) %in% 90L:92L)) x <- CFfactor_coverage(cf, f, "absolute") expect_equal(x[1L], 90L) expect_equal(x[80L], 92L) expect_true(all(x %in% 90L:92L)) x <- CFfactor_coverage(cf, f, "relative") expect_true(all(x == 1L)) f <- CFfactor(cf, "month") expect_equal(sum(CFfactor_units(cf, f)), 7300L) expect_true(all(CFfactor_coverage(cf, f, "absolute") == month_days)) expect_true(all(CFfactor_coverage(cf, f, "relative") == 1L)) f <- CFfactor(cf, "dekad") expect_equal(sum(CFfactor_units(cf, f)), 7300L) x <- CFfactor_coverage(cf, f, "absolute") expect_true(all(x == dekad_days)) expect_true(all(CFfactor_coverage(cf, f, "relative") == 1L)) f <- CFfactor(cf, "day") expect_equal(sum(CFfactor_units(cf, f)), 7300L) expect_true(all(CFfactor_coverage(cf, f, "absolute") == 1L)) expect_true(all(CFfactor_coverage(cf, f, "relative") == 1L)) # 360_day calendar cf360 <- CFtime("days since 2001-01-01", "360_day", 0:7199) f <- CFfactor(cf360, "month") expect_equal(sum(CFfactor_units(cf360, f)), 7200L) expect_true(all(CFfactor_coverage(cf360, f, "absolute") == 30L)) expect_true(all(CFfactor_coverage(cf360, f, "relative") == 1L)) # Units and coverage in factor levels with eras f <- CFfactor(cf, "year", eras) expect_true(all(unlist(CFfactor_units(cf, f)) == rep(365L, 6L))) expect_true(all(unlist(CFfactor_coverage(cf, f, "absolute")) == c(rep(365L, 6L), 0L))) expect_equal(sum(sapply(CFfactor_coverage(cf, f, "relative"), sum)), 3L) f <- CFfactor(cf, "season", eras) expect_true(all(sapply(CFfactor_units(cf, f), function(x) {all(x == c(90L, 92L, 92L, 91L))}))) x <- CFfactor_coverage(cf, f, "absolute") expect_equal(x$first[1L], 59L) # Jan + Feb of first year at beginning of time series expect_equal(x$double, c(180L, 184L, 184L, 182L)) # two full years expect_equal(x$final3, c(270L, 276L, 276L, 273L)) # three full years x <- unlist(CFfactor_coverage(cf, f, "relative")) #expect_equal(x[1], 59 / 90). 
# works in the console but not here expect_true(all(x[2L:12L] == 1L)) f <- CFfactor(cf, "month", eras) expect_true(all(sapply(CFfactor_units(cf, f), function(x) {all(x == month_days)}))) x <- CFfactor_coverage(cf, f, "absolute") expect_true(all(x$first == month_days)) expect_true(all(x$double == month_days * 2L)) expect_true(all(x$final3 == month_days * 3L)) expect_true(all(unlist(CFfactor_coverage(cf, f, "relative")) == 1L)) f <- CFfactor(cf, "dekad", eras) expect_true(all(sapply(CFfactor_units(cf, f), function(x) {all(x == dekad_days)}))) x <- CFfactor_coverage(cf, f, "absolute") expect_true(all(x$first == dekad_days)) expect_true(all(x$double == dekad_days * 2L)) expect_true(all(x$final3 == dekad_days * 3L)) expect_true(all(unlist(CFfactor_coverage(cf, f, "relative")) == 1L)) f <- CFfactor(cf, "day", eras) expect_true(all(unlist(CFfactor_units(cf, f)) == 1L)) x <- CFfactor_coverage(cf, f, "absolute") expect_true(all(x$first == 1L)) expect_true(all(x$double == 2L)) expect_true(all(x$final3 == 3L)) expect_true(all(unlist(CFfactor_coverage(cf, f, "relative")) == 1L)) # all_leap calendar cf366 <- CFtime("days since 2001-01-01", "all_leap", 0:7319) f <- CFfactor(cf366, "year", eras) expect_true(all(unlist(CFfactor_units(cf366, f)) == rep(366L, 6L))) expect_true(all(unlist(CFfactor_coverage(cf366, f, "absolute")) == c(rep(366L, 6L), 0L))) expect_equal(sum(sapply(CFfactor_coverage(cf366, f, "relative"), sum)), 3L) f <- CFfactor(cf366, "season", eras) expect_true(all(sapply(CFfactor_units(cf366, f), function(x) {all(x == c(91L, 92L, 92L, 91L))}))) x <- CFfactor_coverage(cf366, f, "absolute") expect_equal(x$first[1L], 60L) # Jan + Feb of first year at beginning of time series expect_equal(x$double, c(182L, 184L, 184L, 182L)) # two full years expect_equal(x$final3, c(273L, 276L, 276L, 273L)) # three full years x <- unlist(CFfactor_coverage(cf366, f, "relative")) #expect_equal(x[1], 60 / 90). 
# works in the console but not here expect_true(all(x[2L:12L] == 1L)) f <- CFfactor(cf366, "month", eras) expect_true(all(sapply(CFfactor_units(cf366, f), function(x) {all(x == leap_month_days)}))) x <- CFfactor_coverage(cf366, f, "absolute") expect_true(all(x$first == leap_month_days)) expect_true(all(x$double == leap_month_days * 2L)) expect_true(all(x$final3 == leap_month_days * 3L)) expect_true(all(unlist(CFfactor_coverage(cf366, f, "relative")) == 1L)) f <- CFfactor(cf366, "dekad", eras) expect_true(all(sapply(CFfactor_units(cf366, f), function(x) {all(x == leap_dekad_days)}))) x <- CFfactor_coverage(cf366, f, "absolute") expect_true(all(x$first == leap_dekad_days)) expect_true(all(x$double == leap_dekad_days * 2L)) expect_true(all(x$final3 == leap_dekad_days * 3L)) expect_true(all(unlist(CFfactor_coverage(cf366, f, "relative")) == 1L)) f <- CFfactor(cf366, "day", eras) expect_true(all(unlist(CFfactor_units(cf366, f)) == 1L)) x <- CFfactor_coverage(cf366, f, "absolute") expect_true(all(x$first == 1L)) expect_true(all(x$double == 2L)) expect_true(all(x$final3 == 3L)) expect_true(all(unlist(CFfactor_coverage(cf366, f, "relative")) == 1L)) # Factors on data not aligned by year t <- CFTime$new("days since 2020-07-01", "standard", 0:364) first <- c("2020", "2020S3", "2020Q3", "2020-07", "2020D19", "2020-07-01") last <- c("2021", "2021S3", "2021Q2", "2021-06", "2021D18", "2021-06-30") for (p in 1:6) { f <- t$factor(CFt$factor_periods[p]) expect_equal(as.character(f)[1L], first[p]) expect_equal(as.character(f)[365L], last[p]) } # Incomplete coverage n <- 365L * 20L cov <- 0.8 offsets <- sample(0L:(n-1L), n * cov) expect_warning(cf <- CFtime("days since 2020-01-01", "365_day", offsets)) f <- CFfactor(cf, "month") x <- CFfactor_coverage(cf, f, "absolute") expect_equal(sum(x), n * cov) x <- CFfactor_coverage(cf, f, "relative") expect_true((cov - 0.01) < mean(x) && mean(x) < (cov + 0.01)) }) test_that("cut() works", { cf <- CFtime("days since 2020-01-01", "360_day", 0:719) expect_error(cut("sfg")) expect_error(cut(cf)) expect_error(cut(cf, breaks = 5)) expect_error(cut(cf, "")) expect_error(cut(cf, "blah")) f <- cut(cf, "quarter") expect_equal(nlevels(f), 8) expect_equal(levels(f), c("2020Q1", "2020Q2", "2020Q3", "2020Q4", "2021Q1", "2021Q2", "2021Q3", "2021Q4")) f <- cut(cf, c("2021-01-01", "2020-04-03")) # out of order expect_s3_class(f, "factor") expect_equal(levels(f), "2020-04-03") f <- cut(cf, c("2020-01-01", "2020-06-17", "2021-01-01", "2021-04-12", "2401-01-01")) expect_equal(levels(f), c("2020-01-01", "2020-06-17", "2021-01-01", "2021-04-12")) }) CFtime/tests/testthat/test-parse_deparse.R 0000644 0001762 0000144 00000014764 14760146635 020370 0 ustar ligges users test_that("timestamp string parsing to offsets and deparsing of offsets to timestamps", { # This test tests: global CFt* constants, CFtime(), parse_timestamps(), as_timestamp(): # decomposing offsets into timestamp elements, generating timestamp strings, # parsing timestamp strings back into timestamp elements, including negative # offsets. 
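  # Round-trip invariant checked in the loop below: offsets are decomposed with
  # offsets2time(), formatted with as_timestamp(), then re-parsed with
  # parse_timestamps(); the parsed parts must equal the decomposed parts for
  # every calendar and time unit.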
for (c in c("standard", "proleptic_gregorian", "julian", "tai", "360_day", "365_day", "366_day", "noleap", "all_leap")) { for (u in 1:4) { offsets <- -1000:1000 def <- paste(CFt$units$name[u], "since 1992-08-20") t <- CFtime(def, c, offsets) time <- t$cal$offsets2time(t$offsets) ts <- as_timestamp(t, "timestamp") cf2 <- CFtime(def, c) tp <- parse_timestamps(cf2, ts) expect_equal(tp, time) } } # Add no offsets, test return value t <- CFTime$new("days since 1992-08-20", "standard") res <- t$cal$offsets2time() expect_equal(nrow(res), 0L) }) test_that("testing calendars with leap years", { # This test tests that for standard, proleptic_gregorian, julian and tai # calendars in leap years before/on/after the leap day function as needed. # Also testing year 2000 and 2100 offsets. for (c in c("standard", "proleptic_gregorian", "tai", "julian")) { for (d in c("1996-01-15", "1996-02-29", "1996-04-01")) { def <- paste("days since", d) t <- CFtime(def, c, c(1:2500, 36501:39000)) time <- t$cal$offsets2time(t$offsets) ts <- as_timestamp(t, "timestamp") cf2 <- CFtime(def, c) tp <- parse_timestamps(cf2, ts) expect_equal(tp, time) } } }) # test_that("Testing milli-second timestamp string parsing to offsets and deparsing # of offsets to timestamps", { # # This test tests: global CFt* constants, CFtime(), parse_timestamps(), as_timestamp(): # # decomposing offsets into milli-second timestamp elements, generating # # timestamp strings, parsing timestamp strings back into timestamp elements. # for (c in c("standard", "proleptic_gregorian", "julian", "tai", "360_day", "365_day", "366_day", "noleap", "all_leap")) { # for (u in 1:2) { # offsets <- runif(10000, max = 10000) # def <- paste(CFt$units$name[u], "since 1978-08-20 07:34:12.2") # expect_warning(t <- CFtime(def, c, offsets)) # not ordered # time <- t$cal$offsets2time(t$offsets) # ts <- as_timestamp(t, "timestamp") # cf2 <- CFtime(def, c) # tp <- parse_timestamps(cf2, ts) # expect_equal(round(tp[c("second", "offset")], 2), round(time[c("second", "offset")], 2)) # } # } # }) test_that("Disallow parsing of timestamps on month and year calendar units", { for (c in c("standard", "proleptic_gregorian", "julian", "tai", "360_day", "365_day", "366_day", "noleap", "all_leap")) { for (u in 5:6) { def <- paste(CFt$units$name[u], "since 2020-01-01") t <- CFtime(def, c, 0:23) ts <- as_timestamp(t, "timestamp") expect_error(parse_timestamps(t, ts)) } } }) test_that("Gregorian/Julian calendar gap in the standard calendar", { t <- CFtime("days since 1582-10-01", "standard", 0:3) ts <- as_timestamp(t) expect_equal(ts[4], "1582-10-04") expect_false(t$cal$POSIX_compatible(0:3)) t <- t + 4:10 ts <- as_timestamp(t) expect_equal(ts[5], "1582-10-15") expect_equal(ts[11], "1582-10-21") expect_equal(parse_timestamps(t, c("1582-09-30", ts))$offset, -1:10) t <- CFtime("days since 1582-10-20", "standard", -10:0) ts <- as_timestamp(t) expect_equal(ts[5], "1582-10-04") expect_equal(ts[6], "1582-10-15") expect_false(t$POSIX_compatible()) # use t$offsets expect_true(t$cal$POSIX_compatible(0:100)) # use supplied offsets expect_true(t$cal$POSIX_compatible(-5:100)) expect_false(t$cal$POSIX_compatible(-6:100)) }) test_that("Leap seconds in the utc calendar work fine", { expect_error(CFtime("seconds since 1792-01-01", "utc")) # No UTC in 1792 t <- CFTime$new("seconds since 1972-06-30 23:59:57", "utc", 1:4) expect_equal(as_timestamp(t), c("1972-06-30 23:59:58", "1972-06-30 23:59:59", "1972-06-30 23:59:60", "1972-07-01 00:00:00")) expect_error(CFTime$new("seconds since 1973-06-30 23:59:60", 
"utc", -1:2)) # Bad leap second t <- CFTime$new("seconds since 1972-06-30 23:59:60", "utc", -1:2) expect_equal(as_timestamp(t), c("1972-06-30 23:59:59", "1972-06-30 23:59:60", "1972-07-01 00:00:00", "1972-07-01 00:00:01")) t <- CFTime$new("seconds since 2016-12-31 23:59:58", "utc", 1:4) expect_equal(as_timestamp(t), c("2016-12-31 23:59:59", "2016-12-31 23:59:60", "2017-01-01 00:00:00", "2017-01-01 00:00:01")) t <- CFTime$new("minutes since 2016-12-31 23:58:00", "utc", 1:4) expect_equal(as_timestamp(t), c("2016-12-31 23:59:00", "2016-12-31 23:59:60", "2017-01-01 00:00:59", "2017-01-01 00:01:59")) t <- CFTime$new("hours since 2016-12-31 22:00", "utc", 1:4) expect_equal(as_timestamp(t), c("2016-12-31 23:00:00", "2016-12-31 23:59:60", "2017-01-01 00:59:59", "2017-01-01 01:59:59")) t <- CFTime$new("days since 2016-12-30", "utc", 1:4) expect_equal(as_timestamp(t), c("2016-12-31 00:00:00", "2016-12-31 23:59:60", "2017-01-01 23:59:59", "2017-01-02 23:59:59")) }) test_that("Fractional time parts and replace Z timezone with offset", { offsets <- c("1980-05-06Z", "1980-05-06 12.32Z", "1980-05-06 12:54.38Z", "1980-05-06 12:54:12.32Z") res <- matrix(c(0.00, 0.00, 0.80, 0.32, 0.0000, 0.2000, 0.3800, 0.2053, 0.0000, 0.3200, 0.9063, 0.9034, 0.0000, 0.5133, 0.5378, 0.5376), ncol = 4) for (c in c("standard", "proleptic_gregorian", "julian", "tai", "360_day", "365_day", "366_day", "noleap", "all_leap")) { for (u in 1:4) { def <- paste(CFt$units$name[u], "since 1978-08-20") t <- CFtime(def, c) p <- t$cal$parse(offsets) expect_equal(round(p$offset %% 1, 4), res[,u]) expect_equal(p$tz, rep("+0000", 4)) } } offsets <- c("1980-05-06", "1980-05-06 12.32", "1980-05-06 12:54.38", "1980-05-06 12:54:12.32") res <- matrix(c(0.00, 0.00, 0.80, 0.32, 0.0333, 0.2333, 0.4133, 0.2387, 0.0006, 0.3206, 0.9069, 0.9040, 0.0000, 0.5134, 0.5378, 0.5377), ncol = 4) for (u in 1:4) { def <- paste(CFt$units$name[u], "since 1978-08-20") t <- CFtime(def, "utc") p <- t$cal$parse(offsets) expect_equal(round(p$offset %% 1, 4), res[,u]) } }) CFtime/tests/testthat/test-functions.R 0000644 0001762 0000144 00000002737 14757613653 017565 0 ustar ligges users test_that("Various minor functions", { cf <- CFtime("days since 2001-01-01", "standard", 0:999) cf2 <- CFTime$new("days since 2000-01-01", "julian", 0:999) cf3 <- CFtime("days since 2001-01-01", "360_day", 0:999) cf4 <- CFtime("days since 2001-01-01", "365_day", 0:999) cf5 <- CFtime("days since 2001-01-01", "366_day", 0:999) cf6 <- CFtime("days since 2001-01-01", "proleptic_gregorian", 0:999) # Days in a month expect_error(month_days("1-2-3")) expect_equal(month_days(cf), c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) expect_equal(month_days(cf2), c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) expect_equal(month_days(cf3), rep(30, 12)) expect_equal(month_days(cf4), c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) expect_equal(month_days(cf5), c(31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) expect_equal(month_days(cf6), c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) expect_error(month_days(cf, 4)) expect_warning(month_days(cf, c("2001-01-01", "abc")), "^Some dates could not be parsed") x <- c("2021-11-27", "2021-12-10", "2022-02-14", "2022-02-18", "2024-02-03", "2100-02-02") expect_equal(month_days(cf, x), c(30, 31, 28, 28, 29, 28)) expect_equal(month_days(cf2, x), c(30, 31, 28, 28, 29, 29)) expect_equal(month_days(cf3, x), rep(30, 6)) expect_equal(month_days(cf4, x), c(30, 31, 28, 28, 28, 28)) expect_equal(month_days(cf5, x), c(30, 31, 29, 29, 29, 29)) 
expect_equal(month_days(cf6, x), c(30, 31, 28, 28, 29, 28)) }) CFtime/tests/testthat/test-CFtime.R 0000644 0001762 0000144 00000014237 14760424753 016715 0 ustar ligges users test_that("test all variants of creating a CFtime object and useful functions", { # Call CFtime() without arguments expect_error(CFtime()) # Call CFtime() with an invalid definition expect_error(CFtime("foo")) expect_error(CFtime("zxcv since 1991-01-01")) expect_error(CFtime("days since -991-01-01")) # Call CFtime() with a valid definition and an invalid calendar expect_error(CFtime("days since 1991-01-01", "foo")) expect_equal(calendar(CFtime("days since 1991-01-01", NULL, NULL)), "standard") expect_equal(definition(CFtime("days since 1991-01-01", NULL, NULL)), "days since 1991-01-01") expect_equal(resolution(CFtime("days since 1991-01-01", NULL, NULL)), NA_real_) # Call CFtime() with a valid definition and calendar but faulty offsets expect_error(CFtime("days since 1991-01-01", "standard", c(0:10, NA))) # CFtime() with only a definition t <- CFtime("d per 1991-01-01") expect_equal(origin(t)[1:3], data.frame(year = 1991, month = 1, day = 1)) expect_equal(unit(t), "days") expect_equal(calendar(t), "standard") expect_equal(length(offsets(t)), 0L) # CFtime with only a definition and a calendar t <- CFtime("d per 1991-01-01", "julian") expect_match(capture_output(t$print()), "^CF calendar:") expect_match(capture_output(t$print()), "Elements: \\(no elements\\)\\n Bounds : \\(not set\\)$") expect_equal(origin(t)[1:3], data.frame(year = 1991, month = 1, day = 1)) expect_equal(unit(t), "days") expect_equal(calendar(t), "julian") expect_equal(length(offsets(t)), 0L) # CFtime with a single offset t <- t + 15 expect_equal(as_timestamp(t, "date"), "1991-01-16") expect_match(capture_output(t$print()), "Elements: 1991-01-16 \\n Bounds : not set$") # Invalid offsets expect_error(CFtime("d per 1991-01-01", "julian", c(TRUE, FALSE, FALSE))) # Character offsets t <- CFtime("hours since 2023-01-01", "360_day", "2023-04-30T23:00") expect_equal(range(t), c("2023-01-01 00:00:00", "2023-04-30 23:00:00")) expect_equal(length(as_timestamp(t, "timestamp")), 4 * 30 * 24) t$bounds <- TRUE expect_equal(t$range(bounds = TRUE), c("2022-12-30 23:30:00", "2023-04-30 23:30:00")) expect_warning(t <- CFtime("days since 2023-01-01", "366_day", c("2023-01-01", "2023-04-13", "2023-10-30", "2023-05-12"))) expect_equal(range(t), c("2023-01-01", "2023-10-30")) # Merge two CFtime instances / extend offsets t1 <- CFtime("hours since 2001-01-01", "360_day", 0:99) t2 <- CFtime("hours since 2001-01-01", "julian", 100:199) expect_false(t1 == t2) expect_error(t1 + t2) t2 <- CFtime("hours since 2001-01-01", "360_day", 100:199) expect_false(t1 == t2) expect_equal(length(offsets(t1 + t2)), 200) expect_equal(length(offsets(t1 + 100:199)), 200) t1 <- CFtime("days since 2022-01-01", "365_day", 0:364) t2 <- CFtime("days since 2023-01-01", "365_day", 0:364) expect_match(capture_output(t1$print()), "between 365 elements\\)\\n Bounds : not set$") expect_true(length(offsets(t1 + t2)) == 730) expect_true(all(range(diff(offsets(t1 + t2))) == c(1, 1))) expect_warning(t3 <- t2 + t1) expect_true(length(offsets(t3)) == 730) expect_false((range(diff(offsets(t3))) == c(1, 1))[1]) t2 <- t1 + c("2023-01-01", "2023-01-02") expect_match(capture_output(t1$print()), "between 365 elements\\)\\n Bounds : not set$") expect_equal(t2$resolution, 1) # Timezones expect_equal(timezone(t1), "+0000") expect_false(grepl("+0000", capture_output(t1$print()), fixed = TRUE)) t1 <- CFtime("days since 
2022-01-01 00:00:00+04", "365_day", 0:364) expect_true(grepl("+0400", capture_output(t1$print()), fixed = TRUE)) # Time series completeness t <- CFtime("d per 1991-01-01", "julian") expect_error(is_complete("zxcv")) expect_true(is.na(is_complete(t))) expect_true(is_complete(t1)) mid_months <- c("1950-01-16T12:00:00", "1950-02-15T00:00:00", "1950-03-16T12:00:00", "1950-04-16T00:00:00", "1950-05-16T12:00:00", "1950-06-16T00:00:00", "1950-07-16T12:00:00", "1950-08-16T12:00:00", "1950-09-16T00:00:00", "1950-10-16T12:00:00", "1950-11-16T00:00:00", "1950-12-16T12:00:00") t <- CFtime("days since 1950-01-01", "standard", mid_months) expect_true(is_complete(t)) ty <- CFtime("years since 2020-01-01", "standard", 0:19) expect_true(is_complete(ty)) ty <- ty + 30:39 expect_false(is_complete(ty)) # Range t <- CFtime("days since 2001-01-01") expect_equal(range(t), c(NA_character_, NA_character_)) t <- t + 0:1 expect_error(range(t, 123)) expect_error(range(t, c("asd %d", "%F"))) expect_equal(range(t, "%Y-%B-%Od"), c("2001-January-01", "2001-January-02")) # Range on unsorted offsets random <- runif(100, min = 1, max = 99) expect_warning(t <- CFtime("days since 2001-01-01", offsets = c(0, random[1:50], 100, random[51:100]))) expect_equal(range(t), c("2001-01-01", paste0(as.Date("2001-01-01") + 100))) # Subsetting t <- CFtime("hours since 2023-01-01 00:00:00", "standard", 0:239) expect_error(slice("zxcv")) expect_true(all(slice(t, c("2023-01-01", "2023-02-01")))) expect_true(length(which(slice(t, c("2023-01-01", "2023-05-01")))) == 240) expect_true(length(which(slice(t, c("2023-01-01 00:00", "2023-01-01 04:00")))) == 4) expect_true(length(which(slice(t, c("2023-01-01 04:00", "2023-01-01 00:00")))) == 4) # extremes in reverse order expect_true(slice(t, c("2022-01-01", "2023-01-02"))[1]) # early extreme before timeseries expect_true(all(!slice(t, c("2023-02-01", "2023-03-01")))) # both extremes outside time series expect_equal(sum(slice(t, c("2023-01-01 00:00", "2023-01-01 04:00", "2023-01-02 00:00"))), 24) expect_equal(sum(slice(t, c("2023-01-01 00:00", "2023-01-01 04:00", "2023-01-02 00:00"), TRUE)), 25) }) test_that("Leap years on some calendars", { t <- CFTime$new("days since 2025-01-01", "360_day") expect_true(all(!t$cal$leap_year(c(2000:2025)))) t <- CFTime$new("days since 2025-01-01", "366_day") expect_true(all(t$cal$leap_year(c(2000:2025)))) }) test_that("Working with packages and files", { lf <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE) if (requireNamespace("ncdfCF")) lapply(lf, function(f) { nc <- ncdfCF::open_ncdf(f) expect_s3_class(nc[["time"]]$values, "CFTime") }) }) CFtime/tests/testthat.R 0000644 0001762 0000144 00000000610 14463144520 014546 0 ustar ligges users # This file is part of the standard setup for testthat. # It is recommended that you do not modify it. # # Where should you do additional test configuration? 
# Learn more about the roles of various files in: # * https://r-pkgs.org/testing-design.html#sec-tests-files-overview # * https://testthat.r-lib.org/articles/special-files.html library(testthat) library(CFtime) test_check("CFtime") CFtime/MD5 0000644 0001762 0000144 00000007715 14761426557 011764 0 ustar ligges users a5dfea654c9bc3ac09a68d29ee43bbb5 *DESCRIPTION ceda70a2b787c5d3feab27e28804846c *LICENSE 0b9a99e3fec5ed132e1909c6e5c9dc9b *NAMESPACE 45c5d1c55bb54e897e0f9cfad0d9c238 *NEWS.md 739c0fdc0b724bf22cda66dc32b30e41 *R/CFCalendar.R a8a3f3144708a6c8fe8cf34abc1c8983 *R/CFCalendar360.R 19277fbb8a12af1cb45ddcd412a9fe7e *R/CFCalendar365.R 58970ea16e54a4503e4cd741636c5950 *R/CFCalendar366.R 8163e3096e5b9a055d65bd08d937f4f8 *R/CFCalendarJulian.R 278fc848bce28c1e30c9a84e628bd35b *R/CFCalendarProleptic.R 6826e3f05aef71ec6c4f702d513e4211 *R/CFCalendarStandard.R 738eb16b9a0598d06860eeafbeea77ad *R/CFCalendarTAI.R 90b02f59cce4481dfcbc8a86fc069d9d *R/CFCalendarUTC.R ed1235155f2d01e30c99524af909215d *R/CFtime-package.R 3cb7237f3a9c1add975499def3785664 *R/CFtime.R 257f66f74a5b0ea2f15b8cf1a78f9954 *R/api.R a1955195bcfff851ebff52e2c1ec2a47 *R/deprecated.R 7d9c5b84e273f3137ff43263820d46df *R/helpers.R aa481a764e419190fa21a3f91b67724f *R/zzz.R f72f76479a1b8e2392a8c3d7db00a4a7 *README.md 99481c2e959f41b80a27e0276faf434f *TODO d9a261eed3a05c4250379ec1cd0b9d49 *build/vignette.rds 4f93ccfd17ec145e1f26f3e48adea2f1 *inst/doc/CFtime.R 93f2645009dd1790ce288fa70f09b9f6 *inst/doc/CFtime.Rmd d5ba52204c845e0ddf95777b2209ebb0 *inst/doc/CFtime.html 5e270ae6034b339c2a13d20a5f7d2dec *inst/doc/Conformance.R ff1c18028d0887093815b85a0f1a3bd9 *inst/doc/Conformance.Rmd 33b6d91935354a2e72df416b98ee4a01 *inst/doc/Conformance.html eb8f1917a87aa89451589fe737155c09 *inst/doc/Processing.R 6e6cb15688d7f267a85ce983c5db2882 *inst/doc/Processing.Rmd 3680d9971f9f685b93455182619696c9 *inst/doc/Processing.html 084c16d847979a81347ea1e449d81309 *inst/extdata/pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701.nc 8b213590d9bb653f257216e9feed4e5f *inst/extdata/pr_day_MRI-ESM2-0_ssp245_r1i1p1f1_gn_20150101-20991231_v20190603.nc 7412c2337903a2869c3ad4a124f5eabf *man/CFCalendar.Rd 09872b226cd0d51c186e5294d71858f1 *man/CFCalendar360.Rd fbd5316338f223ba84aaaa34aacef72b *man/CFCalendar365.Rd b3648ff4407c946da82aaf6bf710fc27 *man/CFCalendar366.Rd d4b8d9bc0fec1bb9965b209141c3cc53 *man/CFCalendarJulian.Rd 5e77f06e548b3ae668e3afa885fb7791 *man/CFCalendarProleptic.Rd b3c967bfeb5cb2f26738b4ef09baa33a *man/CFCalendarStandard.Rd 9a0d8ac261562e930c670c45ffd6e2ff *man/CFCalendarTAI.Rd c40e4bbb1ea64c0ed827e147cdc04a04 *man/CFCalendarUTC.Rd 01a4ead00de1f0dbb0723cf98c1cc6a4 *man/CFTime.Rd a8695422781313df7947dfede0a073fa *man/CFfactor.Rd 0eafe7d334947473e847411bc8df868c *man/CFfactor_coverage.Rd 3ddacc9428cc933f421f76a48419650b *man/CFfactor_units.Rd 2b3d94d9a9a051d3466a7634ba00b1fc *man/CFtime-function.Rd 4ce3903f42b42f5012d9d1e2d6c34fe0 *man/CFtime-package.Rd 99fc41c73429fd34a2377d347b0095c9 *man/as.character.CFTime.Rd 893185474e36cf7637ccedd3c2cd89ed *man/as_timestamp.Rd 928d669f50f1aef01aef6f54c0f2a937 *man/bounds.Rd f13d54347c78b995c8dc09486c07bd6e *man/cut.CFTime.Rd 36e13a1963c46c120d3125494873793f *man/deprecated_functions.Rd 892fc0082cc3642fbc10a67f09a62351 *man/equals-.CFTime.Rd 25f51a0bb55350284d2aeba3998c8bf2 *man/indexOf.Rd 6397630417e4c640d9f48cb5b6c0d9a0 *man/is_complete.Rd c10050093ad989a9ccdd2ecfcddd5037 *man/length.CFTime.Rd 41217fcff2402427262be1caf8f2ed85 *man/month_days.Rd 26a6be27bce47c0e824e2070bd69da38 
*man/parse_timestamps.Rd b91ba75b7b280037963dcf0441265bd6 *man/plus-.CFTime.Rd c3132d53fd7a6632e1fe403ac1177f45 *man/properties.Rd c2eac9d491e84dadd8c496a328318d03 *man/range.CFTime.Rd b1c90878a67722a9566ff7bc7c7fbd20 *man/slab.Rd ab86a37b7410f554352df38fb9460fb4 *man/slice.Rd e351898df6245b569dbfb38a391f3d2d *tests/testthat.R 0e0a42c19c33307062c21e248101513a *tests/testthat/test-CFbounds.R 825e31f25b045ce91d56079dca30b3f9 *tests/testthat/test-CFformat.R ecdcddb577c8e9d9b6d8d2732fa08dd9 *tests/testthat/test-CFtime.R 9fa5a979324835f36bcd11fb5a75614d *tests/testthat/test-functions.R f776c59505ca89d719195df2e3255e2d *tests/testthat/test-parse_deparse.R 93f2645009dd1790ce288fa70f09b9f6 *vignettes/CFtime.Rmd ff1c18028d0887093815b85a0f1a3bd9 *vignettes/Conformance.Rmd 6e6cb15688d7f267a85ce983c5db2882 *vignettes/Processing.Rmd CFtime/R/ 0000755 0001762 0000144 00000000000 14761414434 011632 5 ustar ligges users CFtime/R/CFCalendarUTC.R 0000644 0001762 0000144 00000034773 14760025552 014267 0 ustar ligges users #' @title Coordinated Universal Time CF calendar #' #' @description This class represents a calendar based on the Coordinated #' Universal Time. Validity is from 1972 onwards, with dates represented using #' the Gregorian calendar, up to the present (so future timestamps are not #' allowed). Leap seconds are considered in all calculations. Also, time zone #' information is irrelevant and may not be given. #' #' In general, the calendar should use a unit of time of a second. Minute, #' hour and day are allowed but discouraged. Month and year as time unit are #' not allowed as there is no practical way to maintain leap second accuracy. #' #' @aliases CFCalendarUTC #' @docType class CFCalendarUTC <- R6::R6Class("CFCalendarUTC", inherit = CFCalendarProleptic, private = list( # Leap days in various incarnations. More will be added in initialize(). leapdays = data.frame( year = c(1972L, 1972L, 1973L, 1974L, 1975L, 1976L, 1977L, 1978L, 1979L, 1981L, 1982L, 1983L, 1985L, 1987L, 1989L, 1990L, 1992L, 1993L, 1994L, 1995L, 1997L, 1998L, 2005L, 2008L, 2012L, 2015L, 2016L), month = c(6L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 12L, 6L, 6L, 6L, 6L, 12L, 12L, 12L, 6L, 6L, 6L, 12L, 6L, 12L, 12L, 12L, 6L, 6L, 12L), day = c(30L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 30L, 30L, 30L, 30L, 31L, 31L, 31L, 30L, 30L, 30L, 31L, 30L, 31L, 31L, 31L, 30L, 30L, 31L), compound = c(19720630L, 19721231L, 19731231L, 19741231L, 19751231L, 19761231L, 19771231L, 19781231L, 19791231L, 19810630L, 19820630L, 19830630L, 19850630L, 19871231L, 19891231L, 19901231L, 19920630L, 19930630L, 19940630L, 19951231L, 19970630L, 19981231L, 20051231L, 20081231L, 20120630L, 20150630L, 20161231L), epochdays = c(182L, 366L, 731L, 1096L, 1461L, 1827L, 2192L, 2557L, 2922L, 3469L, 3834L, 4199L, 4930L, 5844L, 6575L, 6940L, 7487L, 7852L, 8217L, 8766L, 9313L, 9862L, 12419L, 13515L, 14792L, 15887L, 16437L) ), # Offset of the epoch from the origin, in calendar units. Set in initialize(). epoch = 0, # Rata Die of the epoch. epoch_rd = 719893L, # Number of leap seconds applied to the origin of self. Set in initialize(). origin_leapsecs = 0L, # Check if rows in a data.frame with seconds >= 60 are days when a leap # second was applied. Argument `cap` comes from method parse(). If not, # values in that row are set to `NA`. 
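    # Illustrative examples (not exhaustive), based on the leapdays table above:
    # a row for 2016-12-31 23:59:60 is kept because a leap second was applied on
    # that date, whereas 2016-06-30 23:59:60 (no leap second applied) or a second
    # value of 60 at any time other than 23:59 is replaced with `NA`s.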
check_leap_seconds = function(cap) { # There will always be at least one row with 60 seconds or this method # would not be called leap_ndx <- which(cap$second >= 60) tst <- cap[leap_ndx, ] not2359 <- tst$hour != 23L | tst$minute != 59L tst <- tst$year * 10000L + tst$month * 100L + tst$day bad <- !(tst %in% private$leapdays$compound) cap[leap_ndx[bad | not2359], ] <- rep(NA, 7) cap } ), public = list( #' @description Create a new CF UTC calendar. #' @param nm The name of the calendar. This must be "utc". #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. initialize = function(nm, definition) { super$initialize(nm, definition) if (self$unit > 4L) stop("Unit for an UTC calendar cannot be 'month' or 'year'.", call. = FALSE) # nocov # How many leap seconds have been applied to the origin? private$origin_leapsecs <- findInterval(self$origin$year * 10000L + self$origin$month * 100L + self$origin$day, private$leapdays$compound, left.open = TRUE) # Offset of the epoch from the origin private$epoch <- self$parse("1972-01-01")$offset # Add offsets in self calendar units of the leap seconds leapdates <- as.Date(paste(private$leapdays$year, private$leapdays$month, private$leapdays$day, sep = "-")) private$leapdays$seconds <- as.integer(c(unclass(difftime(leapdates, as.Date("1972-01-01"), units = "secs"))) + CFt$units$per_day[1L] + (1:27)) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. valid_days = function(ymd) { nw <- as.POSIXct(Sys.Date(), tz = "UTC") yr <- as.integer(substr(nw, 1, 4)) mon <- as.integer(substr(nw, 6, 7)) dt <- as.integer(substr(nw, 9, 10)) super$valid_days(ymd) & ymd$year >= 1972L & (ymd$year < yr | (ymd$year == yr & (ymd$month < mon | (ymd$month == mon & ymd$day <= dt)))) }, #' @description Parsing a vector of date-time character strings into parts. #' This includes any leap seconds. Time zone indications are not allowed. #' @param d character. A character vector of date-times. #' @return A `data.frame` with columns year, month, day, hour, minute, #' second, time zone, and offset. Invalid input data will appear as `NA`. #' Note that the time zone is always "+0000" and is included to maintain #' compatibility with results from other calendars. parse = function(d) { # Parsers # These parsers are specific to UTC, i.e. starting in 1972 and up to the # current year, and a possible leap second as value 60. # UDUNITS broken timestamp definition, with some changes # broken_timestamp {broken_date}({space|T}+{broken_clock})? -- T not in definition but present in lexer code # broken_date {year}-{month}(-{day})? # year [+-]?[0-9]{1,4} # month 0?[1-9]|1[0-2] # day 0?[1-9]|[1-2][0-9]|30|31 # broken_clock {hour}:{minute}(:{second})? # hour [0-1]?[0-9]|2[0-3] -- sign on hour not allowed # minute [0-5]?[0-9] # second 60|{minute}? # fractional part (\.[0-9]*)? 
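      # Illustrative timestamp strings this parser is meant to accept, taken from
      # the package tests (leap second expressed as a second value of 60, optional
      # fractional part, no time zone offset):
      #   "1972-06-30 23:59:60"    (first UTC leap second)
      #   "2016-12-31 23:59:60"    (most recent leap second in the table above)
      #   "1980-05-06 12:54:12.32" (fractional seconds)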
broken <- paste0( "^", # anchor string at start "([+-]?[0-9]{1,4})", # year, with optional sign "-(0?[1-9]|1[012])", # month "(?:-(0?[1-9]|[12][0-9]|3[01]))?", # day, optional "(?:[T ]", # if a time is following, separate with a single whitespace character or a "T" "([01]?[0-9]|2[0-3])", # hour ":([0-5]?[0-9])", # minute "(?::([0-6]?[0-9]))?", # second, optional "(?:\\.([0-9]*))?", # optional fractional part of the smallest specified unit ")?", # close optional time capture group "(?:\\s", # if a time zone offset is following, separate with a single whitespace character "([+-])?([01]?[0-9]|2[0-3])", # tz hour, with optional sign "(?::(00|15|30|45))?", # optional tz minute, only 4 possible values ")?", # close optional timezone capture group "$" # anchor string at end ) iso8601 <- paste0( "^", "([0-9]{4})", "-(0[1-9]|1[012])", "-(0[1-9]|[12][0-9]|3[01])?", "(?:", "[T ]([01][0-9]|2[0-3])", "(?::([0-5][0-9]))?", "(?::([0-6][0-9]))?", "(?:[\\.,]([0-9]*))?", ")?", "(?:([Z+-])([01][0-9]|2[0-3])?(?::(00|15|30|45))?", ## FIXME: Z?, smaller number of captures ")?$" ) # UDUNITS packed timestamp definition - NOT YET USED # packed_timestamp {packed_date}({space|T}+{packed_clock})? -- T and space only allowed in packed time follows # packed_date {year}({month}{day}?)? -- must be YYYYMMDD or else format is ambiguous, as per lexer code # packed_clock {hour}({minute}{second}?)? -- must be HHMMSS to be unambiguous # packed <- stringi::stri_join( # "^", # anchor string at start # "([+-]?[0-9]{4})", # year, with optional sign # "(0[1-9]|1[012])?", # month, optional # "(0[1-9]|[12][0-9]|3[01])?", # day, optional # "(?:[T,\\s]", # if a time is following, separate with a single whitespace character or a "T" # "([01][0-9]|2[0-3])?", # hour # "([0-5][0-9])?", # minute, optional # "([0-5]?[0-9](?:\\.[0-9]*)?)?", # second, optional, with optional fractional part # ")?", # close optional time capture group # "$" # anchor string at end # ) parse <- data.frame(year = integer(), month = integer(), day = integer(), hour = integer(), minute = integer(), second = numeric(), frac = character(), tz_sign = character(), tz_hour = character(), tz_min = character()) cap <- utils::strcapture(iso8601, d, parse) missing <- which(is.na(cap$year)) if (length(missing) > 0L) cap[missing,] <- utils::strcapture(broken, d[missing], parse) # Assign any fraction to the appropriate time part cap$frac[is.na(cap$frac)] <- "0" frac <- as.numeric(paste0("0.", cap$frac)) if (sum(frac) > 0) { ndx <- which(!(is.na(cap$second)) & frac > 0) if (length(ndx) > 0L) cap$second[ndx] <- cap$second[ndx] + frac[ndx] ndx <- which(!(is.na(cap$minute)) & is.na(cap$second) & frac > 0) if (length(ndx) > 0L) cap$second[ndx] <- 60L * frac[ndx] ndx <- which(!(is.na(cap$hour)) & is.na(cap$minute) & frac > 0) if (length(ndx) > 0L) { secs <- 3600 * frac cap$minute[ndx] <- secs[ndx] %/% 60 cap$second[ndx] <- secs[ndx] %% 60 } } cap$frac <- NULL # Convert NA time parts to 0 - in CF default time is 00:00:00 when not specified cap$hour[is.na(cap$hour)] <- 0L cap$minute[is.na(cap$minute)] <- 0L cap$second[is.na(cap$second)] <- 0L # Set timezone to 00:00 to align cap data.frame with other calendars cap$tz_sign <- cap$tz_hour <- cap$tz_min <- NULL cap$tz <- "+0000" # Set optional date parts to 1 if not specified cap$month[is.na(cap$month)] <- 1L cap$day[is.na(cap$day)] <- 1L # Check date validity invalid <- !self$valid_days(cap) invalid[is.na(invalid)] <- TRUE if (sum(invalid) > 0L) cap[invalid,] <- rep(NA, 7) # Check that any supplied leap seconds coincide with official 
leap seconds if (any(cap$second >= 60, na.rm = TRUE)) cap <- private$check_leap_seconds(cap) # Calculate offsets if (nrow(self$origin) == 0L) { # if there's no origin yet, don't calculate offsets cap$offset <- rep(0, nrow(cap)) # this happens, f.i., when a CFCalendar is created } else { days <- self$date2offset(cap) chkdays <- cap$year * 10000L + cap$month * 100L + cap$day leapsecs <- findInterval(chkdays, private$leapdays$compound, left.open = TRUE) - private$origin_leapsecs cap$offset <- round((days * 86400 + (cap$hour - self$origin$hour[1]) * 3600 + (cap$minute - self$origin$minute[1]) * 60 + cap$second - self$origin$second + leapsecs) / CFt$units$seconds[self$unit], 9) } cap }, #' @description Decompose a vector of offsets, in units of the calendar, to #' their timestamp values. This adds a specified amount of time to the #' origin of a `CFTime` object. #' @param offsets Vector of numeric offsets to add to the origin of the #' calendar. #' @return A `data.frame` with columns for the timestamp elements and as #' many rows as there are offsets. offsets2time = function(offsets) { len <- length(offsets) if(len == 0L) return(data.frame(year = integer(), month = integer(), day = integer(), hour = integer(), minute = integer(), second = numeric(), tz = character(), offset = numeric())) # Base offsets on epoch, in seconds off <- (offsets - private$epoch) * CFt$units$seconds[self$unit] ndx <- findInterval(off, private$leapdays$seconds) remainder <- off - ifelse(ndx > 0L, private$leapdays$seconds[ndx], 0L) secs <- mins <- hrs <- days <- vector("numeric", len) frac <- rep(0, len) leap <- ndx < 27L & round(private$leapdays$seconds[ndx + 1L] - off, 5) <= 1L if (sum(leap) > 0L) { secs[leap] <- 60 mins[leap] <- 59 hrs[leap] <- 23 days[leap] <- ifelse(ndx[leap] == 0L, private$leapdays$epochdays[1L], private$leapdays$epochdays[ndx + 1L] - private$leapdays$epochdays[ndx]) - 1L } not_leap <- !leap if (sum(not_leap) > 0L) { # Convert remainder to time parts, no leap seconds anymore days[not_leap] <- remainder[not_leap] %/% 86400L # overflow days secs[not_leap] <- round(remainder[not_leap] %% 86400L, 3L) # round down to milli-seconds to avoid errors #frac[not_leap] <- secs[not_leap] %% 1 #secs[not_leap] <- secs[not_leap] %/% 1 # Time elements for output hrs[not_leap] <- secs[not_leap] %/% 3600L mins[not_leap] <- (secs[not_leap] %% 3600L) %/% 60L secs[not_leap] <- secs[not_leap] %% 60L } # Now add days using the calendar epoch_days <- ifelse(ndx > 0L, private$leapdays$epochdays[ndx], 0L) out <- if (any(days != 0L)) .gregorian_offset2date(as.integer(days) + private$epoch_rd + epoch_days) else data.frame(year = rep(self$origin$year, len), month = rep(self$origin$month, len), day = rep(self$origin$day, len)) # Put it all back together again out$hour <- as.integer(hrs) out$minute <- as.integer(mins) out$second <- secs #if (sum(frac) > 0) # out$second <- out$second + frac out$tz <- rep("+0000", len) out$offset <- offsets out } ) ) CFtime/R/deprecated.R 0000644 0001762 0000144 00000003501 14757557554 014074 0 ustar ligges users #' @name deprecated_functions #' @title Deprecated functions #' #' @description #' These functions are deprecated and should no longer be used in new code. The #' below table gives the replacement function to use instead. The function #' arguments of the replacement function are the same as those of the deprecated #' function if no arguments are given in the table. 
#' #' | **Deprecated function** | **Replacement function** | #' | ------------------- | -------------------- | #' | CFcomplete() | [is_complete()] | #' | CFmonth_days() | [month_days()] | #' | CFparse() | [parse_timestamps()] | #' | CFrange() | [range()] | #' | CFsubset() | [slab()] | #' | CFtimestamp() | [as_timestamp()] | #' #' @param t,x,format,asPOSIX,extremes See replacement functions. #' #' @returns See replacement functions. # nocov start #' @rdname deprecated_functions #' @export CFtimestamp <- function(t, format = NULL, asPOSIX = FALSE) { warning("Function `CFtimestamp()` is deprecated. Use `as_timestamp()` instead.") as_timestamp(t, format, asPOSIX) } #' @rdname deprecated_functions #' @export CFmonth_days <- function(t, x = NULL) { warning("Function `CFmonth_days()` is deprecated. Use `month_days()` instead.") month_days(t, x) } #' @rdname deprecated_functions #' @export CFcomplete <- function(x) { warning("Function `CFcomplete()` is deprecated. Use `is_complete()` instead.") is_complete(x) } #' @rdname deprecated_functions #' @export CFsubset <- function(x, extremes) { warning("Function `CFsubset()` is deprecated. Use `slab()` instead.") slab(x, extremes) } #' @rdname deprecated_functions #' @export CFparse <- function(t, x) { warning("Function `CFparse()` is deprecated. Use `parse_timestamps()` instead.") parse_timestamps(t, x) } # nocov end CFtime/R/helpers.R 0000644 0001762 0000144 00000016350 14726273725 013433 0 ustar ligges users # Internal functions # # The functions in this source file are for internal use only. # ============================================================================== # Offsets and timestamp formatting #' Validate offsets passed into a CFTime instance #' #' Tests the `offsets` values. Throws an error if the argument contains `NA` values. #' #' @param offsets The offsets to test #' #' @returns logical. `TRUE` if the offsets are valid, throws an error otherwise. #' @noRd .validOffsets <- function(offsets) { if (any(is.na(offsets))) stop("Offsets cannot contain `NA` values.", call. = FALSE) TRUE } #' Formatting of time strings from time elements #' #' This is an internal function that should not generally be used outside of #' the CFtime package. #' #' @param t A `data.frame` representing timestamps. #' #' @returns A vector of character strings with a properly formatted time. If any #' timestamp has a fractional second part, then all time strings will report #' seconds at milli-second precision. #' @noRd .format_time <- function(t) { fsec <- t$second %% 1L if (any(fsec > 0L)) { paste0(sprintf("%02d:%02d:", t$hour, t$minute), ifelse(t$second < 10, "0", ""), sprintf("%.3f", t$second)) } else { sprintf("%02d:%02d:%02d", t$hour, t$minute, t$second) } } #' Do the time elements have time-of-day information? #' #' If any time information > 0, then `TRUE` otherwise `FALSE`. #' #' This is an internal function that should not generally be used outside of #' the CFtime package. #' #' @param t A `data.frame` representing timestamps. #' #' @returns `TRUE` if any timestamp has time-of-day information, `FALSE` otherwise. #' @noRd .has_time <- function(t) { any(t$hour > 0) || any(t$minute > 0) || any(t$second > 0) } #' Do formatting of timestamps with format specifiers #' #' @param ts `data.frame` of decomposed offsets. #' @param tz Time zone character string. #' @param format A character string with the format specifiers, or #' "date" or "timestamp". #' @returns Character vector of formatted timestamps. 
#' @noRd .format_format <- function(ts, tz, format) { if (format == "") format <- "timestamp" if (format == "timestamp" && sum(ts$hour, ts$minute, ts$second) == 0) format <- "date" if (format == "date") return(sprintf("%04d-%02d-%02d", ts$year, ts$month, ts$day)) else if (format == "timestamp") return(sprintf("%04d-%02d-%02d %s", ts$year, ts$month, ts$day, .format_time(ts))) # Expand any composite specifiers format <- stringr::str_replace_all(format, c("%F" = "%Y-%m-%d", "%R" = "%H:%M", "%T" = "%H:%M:%S")) # Splice in timestamp values for specifiers # nocov start if (grepl("%b|%h", format[1])) { mon <- strftime(ISOdatetime(2024, 1:12, 1, 0, 0, 0), "%b") format <- stringr::str_replace_all(format, "%b|%h", mon[ts$month]) } if (grepl("%B", format[1])) { mon <- strftime(ISOdatetime(2024, 1:12, 1, 0, 0, 0), "%B") format <- stringr::str_replace_all(format, "%B", mon[ts$month]) } # nocov end format <- stringr::str_replace_all(format, "%[O]?d", sprintf("%02d", ts$day)) format <- stringr::str_replace_all(format, "%e", sprintf("%2d", ts$day)) format <- stringr::str_replace_all(format, "%[O]?H", sprintf("%02d", ts$hour)) format <- stringr::str_replace_all(format, "%[O]?I", sprintf("%02d", ts$hour %% 12)) format <- stringr::str_replace_all(format, "%[O]?m", sprintf("%02d", ts$month)) format <- stringr::str_replace_all(format, "%[O]?M", sprintf("%02d", ts$minute)) format <- stringr::str_replace_all(format, "%p", ifelse(ts$hour < 12, "AM", "PM")) format <- stringr::str_replace_all(format, "%S", sprintf("%02d", as.integer(ts$second))) format <- stringr::str_replace_all(format, "%[E]?Y", sprintf("%04d", ts$year)) format <- stringr::str_replace_all(format, "%z", tz) format <- stringr::str_replace_all(format, "%%", "%") format } # ============================================================================== # Other internal functions #' Calculate time units in factors #' #' @param f factor. Factor as generated by `CFfactor()`. #' @param cal `CFCalendar` instance of the `CFTime` instance. #' @param upd numeric. Number of units per day, from the `CFt` environment. #' @returns A vector as long as the number of levels in the factor. 
#' @noRd .factor_units <- function(f, cal, upd) { period <- attr(f, "period") cal_class <- class(cal)[1L] res <- if (period == "day") rep(1L, nlevels(f)) else if (cal_class == "CFCalendar360") { rep(c(360L, 90L, 90L, 30L, 10L, 1L)[which(CFt$factor_periods == period)], nlevels(f)) } else { if (attr(f, "era") > 0L) { if (cal_class == "CFCalendar366") { switch(period, "year" = rep(366L, nlevels(f)), "season" = c(91L, 92L, 92L, 91L)[as.integer(substr(levels(f), 2, 2))], "quarter" = c(91L, 91L, 92L, 92L)[as.integer(levels(f))], "month" = c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[as.integer(levels(f))], "dekad" = { dk <- as.integer(substr(levels(f), 2L, 3L)) ifelse(dk %% 3L > 0L | dk %in% c(12L, 18L, 27L, 33L), 10L, ifelse(dk %in% c(3L, 9L, 15L, 21L, 24L, 30L, 36L), 11L, 9L)) } ) } else { switch(period, "year" = rep(365L, nlevels(f)), "season" = c(90L, 92L, 92L, 91L)[as.integer(substr(levels(f), 2, 2))], "quarter" = c(90L, 91L, 92L, 92L)[as.integer(substr(levels(f), 2, 2))], "month" = c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[as.integer(levels(f))], "dekad" = { dk <- as.integer(substr(levels(f), 2L, 3L)) ifelse(dk %% 3L > 0L | dk %in% c(12L, 18L, 27L, 33L), 10L, ifelse(dk %in% c(3L, 9L, 15L, 21L, 24L, 30L, 36L), 11L, 8L)) } ) } } else { # not an era factor switch(period, "year" = ifelse(cal$leap_year(as.integer(levels(f))), 366L, 365L), "season" = { year <- as.integer(substr(levels(f), 1L, 4L)) season <- as.integer(substr(levels(f), 6L, 6L)) ifelse(cal$leap_year(year), c(91L, 92L, 92L, 91L)[season], c(90L, 92L, 92L, 91L)[season]) }, "quarter" = { year <- as.integer(substr(levels(f), 1L, 4L)) qtr <- as.integer(substr(levels(f), 6L, 6L)) ifelse(cal$leap_year(year), c(91L, 91L, 92L, 92L)[qtr], c(90L, 91L, 92L, 92L)[qtr]) }, "month" = { year <- as.integer(substr(levels(f), 1L, 4L)) month <- as.integer(substr(levels(f), 6L, 7L)) ifelse(cal$leap_year(year), c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[month], c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[month]) }, "dekad" = { year <- as.integer(substr(levels(f), 1L, 4L)) dk <- as.integer(substr(levels(f), 6L, 7L)) ifelse(dk %% 3L > 0L | dk %in% c(12L, 18L, 27L, 33L), 10L, ifelse(dk %in% c(3L, 9L, 15L, 21L, 24L, 30L, 36L), 11L, ifelse(cal$leap_year(year), 9L, 8L))) } ) } } res * upd } CFtime/R/CFCalendarTAI.R 0000644 0001762 0000144 00000002174 14732045567 014246 0 ustar ligges users #' @title International Atomic Time CF calendar #' #' @description This class represents a calendar based on the International #' Atomic Time. Validity is from 1958 onwards, with dates represented using #' the Gregorian calendar. Given that this "calendar" is based on a universal #' clock, the concepts of leap second, time zone and daylight savings time do #' not apply. #' #' @aliases CFCalendarTAI #' @docType class CFCalendarTAI <- R6::R6Class("CFCalendarTAI", inherit = CFCalendarProleptic, public = list( #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. If present, the `tz` column is checked for #' illegal time zone offsets. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. 
valid_days = function(ymd) { super$valid_days(ymd) & ymd$year >= 1958L # FIXME: TZ offsets } ) ) CFtime/R/zzz.R 0000644 0001762 0000144 00000001546 14733774704 012630 0 ustar ligges users #nocov start # Create environment for global CFtime variables CFt <- new.env(parent = emptyenv()) .onLoad <- function(libname, pkgname) { assign("CFunits", data.frame(unit = c("years", "year", "yr", "months", "month", "mon", "days", "day", "d", "hours", "hour", "hr", "h", "minutes", "minute", "min", "seconds", "second", "sec", "s"), id = c(6L, 6L, 6L, 5L, 5L, 5L, 4L, 4L, 4L, 3L, 3L, 3L, 3L, 2L, 2L, 2L, 1L, 1L, 1L, 1L)), envir = CFt) assign("units", data.frame(name = c("seconds", "minutes", "hours", "days", "months", "years"), seconds = c(1L, 60L, 3600L, 86400L, 86400L * 30L, 86400L * 365L), per_day = c(86400, 1440, 24, 1, 1/30, 1/365)), envir = CFt) assign("factor_periods", c("year", "season", "quarter", "month", "dekad", "day"), envir = CFt) } #nocov end CFtime/R/CFCalendar.R 0000644 0001762 0000144 00000040540 14760025054 013675 0 ustar ligges users #' @import R6 NULL #' @title Basic CF calendar #' #' @description This class represents a basic CF calendar. It should not be #' instantiated directly; instead, use one of the descendant classes. #' #' This internal class stores the information to represent date and time #' values using the CF conventions. An instance is created by the exported #' [CFTime] class, which also exposes the relevant properties of this class. #' #' The following calendars are supported: #' #' \itemize{ #' \item [`gregorian\standard`][CFCalendarStandard], the international standard calendar for civil use. #' \item [`proleptic_gregorian`][CFCalendarProleptic], the standard calendar but extending before 1582-10-15 #' when the Gregorian calendar was adopted. #' \item [`tai`][CFCalendarTAI], International Atomic Time clock with dates expressed using the Gregorian calendar. #' \item [`utc`][CFCalendarUTC], Coordinated Universal Time clock with dates expressed using the Gregorian calendar. #' \item [`julian`][CFCalendarJulian], every fourth year is a leap year (so including the years 1700, 1800, 1900, 2100, etc). #' \item [`noleap\365_day`][CFCalendar365], all years have 365 days. #' \item [`all_leap\366_day`][CFCalendar366], all years have 366 days. #' \item [`360_day`][CFCalendar360], all years have 360 days, divided over 12 months of 30 days. #' } #' @references #' https://cfconventions.org/Data/cf-conventions/cf-conventions-1.12/cf-conventions.html#calendar #' @docType class CFCalendar <- R6::R6Class("CFCalendar", public = list( #' @field name Descriptive name of the calendar, as per the CF Metadata #' Conventions. name = "", #' @field definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. definition = "", #' @field unit The numeric id of the unit of the calendar. unit = -1L, #' @field origin `data.frame` with fields for the origin of the calendar. origin = data.frame(), #' @description Create a new CF calendar. #' @param nm The name of the calendar. This must follow the CF Metadata #' Conventions. #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. 
initialize = function(nm, definition) { stopifnot(length(definition) == 1L, length(nm) == 1L) self$name <- tolower(nm) self$definition <- definition parts <- strsplit(definition, " ")[[1L]] if ((length(parts) < 3L) || !(tolower(parts[2L]) %in% c("since", "after", "from", "ref", "per"))) stop("Definition string does not appear to be a CF-compliant time coordinate description", call. = FALSE) u <- which(CFt$CFunits$unit == tolower(parts[1L])) if (length(u) == 0L) stop("Unsupported unit: ", parts[1L], call. = FALSE) self$unit <- CFt$CFunits$id[u] dt <- self$parse(paste(parts[3L:length(parts)], collapse = " ")) if (is.na(dt$year[1L])) stop("Definition string does not appear to be a CF-compliant time coordinate description: invalid base date specification", call. = FALSE) self$origin <- dt }, #' @description Print information about the calendar to the console. #' @param ... Ignored. #' @return `self`, invisibly. print = function(...) { tz <- self$timezone if (tz == "+0000") tz <- "" cat("CF calendar:", "\n Origin : ", self$origin_date, " ", self$origin_time, tz, "\n Units : ", CFt$units$name[self$unit], "\n Type : ", self$name, "\n", sep = "") invisible(self) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return `NULL`. A warning will be generated to the effect that a #' descendant class should be used for this method. valid_days = function(ymd) { warning("Use a descendant class from `CFCalendar` to call this method.", call. = FALSE) # nocov NULL # nocov }, #' @description Indicate if the time series described using this calendar #' can be safely converted to a standard date-time type (`POSIXct`, #' `POSIXlt`, `Date`). #' #' Only the 'standard' calendar and the 'proleptic_gregorian' calendar #' when all dates in the time series are more recent than 1582-10-15 #' (inclusive) can be safely converted, so this method returns `FALSE` by #' default to cover the majority of cases. #' @param offsets The offsets from the CFtime instance. #' @return `FALSE` by default. POSIX_compatible = function(offsets) { FALSE }, #' @description This method tests if the `CFCalendar` instance in argument #' `cal` is compatible with `self`, meaning that they are of the same #' class and have the same unit. Calendars "standard", and "gregorian" are #' compatible, as are the pairs of "365_day" and "no_leap", and "366_day" #' and "all_leap". #' @param cal Instance of a descendant of the `CFCalendar` class. #' @return `TRUE` if the instance in argument `cal` is compatible with #' `self`, `FALSE` otherwise. is_compatible = function(cal) { self$unit == cal$unit && class(self)[1L] == class(cal)[1L] }, #' @description This method tests if the `CFCalendar` instance in argument #' `cal` is equivalent to `self`, meaning that they are of the same class, #' have the same unit, and equivalent origins. Calendars "standard", and #' "gregorian" are equivalent, as are the pairs of "365_day" and #' "no_leap", and "366_day" and "all_leap". #' #' Note that the origins need not be identical, but their parsed values #' have to be. "2000-01" is parsed the same as "2000-01-01 00:00:00", for #' instance. #' @param cal Instance of a descendant of the `CFCalendar` class. #' @return `TRUE` if the instance in argument `cal` is equivalent to #' `self`, `FALSE` otherwise. 
is_equivalent = function(cal) { sum(self$origin[1L,1L:6L] == cal$origin[1L,1L:6L]) == 6L && # Offset column is NA self$is_compatible(cal) }, #' @description Parsing a vector of date-time character strings into parts. #' @param d character. A character vector of date-times. #' @return A `data.frame` with columns year, month, day, hour, minute, #' second, time zone, and offset. Invalid input data will appear as `NA`. parse = function(d) { # Parsers # UDUNITS broken timestamp definition, with some changes # broken_timestamp {broken_date}({space|T}+{broken_clock})? -- T not in definition but present in lexer code # broken_date {year}-{month}(-{day})? # year [+-]?[0-9]{1,4} # month 0?[1-9]|1[0-2] # day 0?[1-9]|[1-2][0-9]|30|31 # broken_clock {hour}:{minute}(:{second})? # hour [0-1]?[0-9]|2[0-3] -- sign on hour not allowed, but see timezone # minute [0-5]?[0-9] # second {minute}? -- leap second not supported # fractional part (\.[0-9]*)? # timezone [+-]?{hour}(:{minute})? -- added, present in lexer code broken <- paste0( "^", # anchor string at start "([+-]?[0-9]{1,4})", # year, with optional sign "-(0?[1-9]|1[012])", # month "(?:-(0?[1-9]|[12][0-9]|3[01]))?", # day, optional "(?:[T ]", # if a time is following, separate with a single whitespace character or a "T" "([01]?[0-9]|2[0-3])", # hour ":([0-5]?[0-9])", # minute "(?::([0-5]?[0-9]))?", # second, optional "(?:\\.([0-9]*))?", # optional fractional part of the smallest specified unit ")?", # close optional time capture group "(?:\\s", # if a time zone offset is following, separate with a single whitespace character "([+-])?([01]?[0-9]|2[0-3])", # tz hour, with optional sign "(?::(00|15|30|45))?", # optional tz minute, only 4 possible values ")?", # close optional timezone capture group "$" # anchor string at end ) iso8601 <- paste0( "^", "([0-9]{4})", "-(0[1-9]|1[012])", "-(0[1-9]|[12][0-9]|3[01])?", "(?:", "[T ]([01][0-9]|2[0-3])", "(?::([0-5][0-9]))?", "(?::([0-5][0-9]))?", "(?:[\\.,]([0-9]*))?", ")?", "(?:([Z+-])([01][0-9]|2[0-3])?(?::(00|15|30|45))?", ## FIXME: Z?, smaller number of captures ")?$" ) # UDUNITS packed timestamp definition - NOT YET USED # packed_timestamp {packed_date}({space|T}+{packed_clock})? -- T and space only allowed in packed time follows # packed_date {year}({month}{day}?)? -- must be YYYYMMDD or else format is ambiguous, as per lexer code # packed_clock {hour}({minute}{second}?)? -- must be HHMMSS to be unambiguous # timezone [+-]?{hour}({minute})? 
-- added, present in lexer code, must be HHMM # packed <- stringi::stri_join( # "^", # anchor string at start # "([+-]?[0-9]{4})", # year, with optional sign # "(0[1-9]|1[012])?", # month, optional # "(0[1-9]|[12][0-9]|3[01])?", # day, optional # "(?:[T,\\s]", # if a time is following, separate with a single whitespace character or a "T" # "([01][0-9]|2[0-3])?", # hour # "([0-5][0-9])?", # minute, optional # "([0-5]?[0-9](?:\\.[0-9]*)?)?", # second, optional, with optional fractional part # ")?", # close optional time capture group # "(?:\\s", # if a time zone offset is following, separate with a single whitespace character # "([+-]?[01][0-9]|2[0-3])?", # hour, with optional sign # "(00|15|30|45)?", # minute, only 4 possible values # ")?", # close optional timezone capture group # "$" # anchor string at end # ) parse <- data.frame(year = integer(), month = integer(), day = integer(), hour = integer(), minute = integer(), second = numeric(), frac = character(), tz_sign = character(), tz_hour = character(), tz_min = character()) # Drop "UTC", if given d <- trimws(gsub("UTC$", "", d)) cap <- utils::strcapture(iso8601, d, parse) missing <- which(is.na(cap$year)) if (length(missing) > 0L) cap[missing,] <- utils::strcapture(broken, d[missing], parse) # Assign any fraction to the appropriate time part cap$frac[is.na(cap$frac)] <- "0" frac <- as.numeric(paste0("0.", cap$frac)) if (sum(frac) > 0) { ndx <- which(!(is.na(cap$second)) & frac > 0) if (length(ndx) > 0L) cap$second[ndx] <- cap$second[ndx] + frac[ndx] ndx <- which(!(is.na(cap$minute)) & is.na(cap$second) & frac > 0) if (length(ndx) > 0L) cap$second[ndx] <- 60L * frac[ndx] ndx <- which(!(is.na(cap$hour)) & is.na(cap$minute) & frac > 0) if (length(ndx) > 0L) { secs <- 3600 * frac cap$minute[ndx] <- secs[ndx] %/% 60 cap$second[ndx] <- secs[ndx] %% 60 } } cap$frac <- NULL # Convert NA time parts to 0 - in CF default time is 00:00:00 when not specified cap$hour[is.na(cap$hour)] <- 0L cap$minute[is.na(cap$minute)] <- 0L cap$second[is.na(cap$second)] <- 0L # Set timezone to default value where needed ndx <- which(cap$tz_sign == "Z") if (length(ndx) > 0L) { cap$tz_sign[ndx] <- "+" cap$tz_hour[ndx] <- "00" cap$tz_min[ndx] <- "00" } cap$tz <- paste0(ifelse(cap$tz_sign == "", "+", cap$tz_sign), ifelse(cap$tz_hour == "", "00", cap$tz_hour), ifelse(cap$tz_min == "", "00", cap$tz_min)) cap$tz <- ifelse(cap$tz =="NANANA", "+0000", cap$tz) cap$tz_sign <- cap$tz_hour <- cap$tz_min <- NULL # Set optional date parts to 1 if not specified cap$month[is.na(cap$month)] <- 1L cap$day[is.na(cap$day)] <- 1L # Check date validity invalid <- !self$valid_days(cap) invalid[is.na(invalid)] <- TRUE if (sum(invalid) > 0L) cap[invalid,] <- rep(NA, 7) # Calculate offsets if (nrow(self$origin) == 0L) { # if there's no origin yet, don't calculate offsets cap$offset <- rep(0, nrow(cap)) # this happens, f.i., when a CFCalendar is created } else { days <- self$date2offset(cap) cap$offset <- round((days * 86400 + (cap$hour - self$origin$hour[1]) * 3600 + (cap$minute - self$origin$minute[1]) * 60 + cap$second - self$origin$second) / CFt$units$seconds[self$unit], 6) } cap }, #' @description Decompose a vector of offsets, in units of the calendar, to #' their timestamp values. This adds a specified amount of time to the #' origin of a `CFTime` object. #' #' This method may introduce inaccuracies where the calendar unit is #' "months" or "years", due to the ambiguous definition of these units. #' @param offsets Vector of numeric offsets to add to the origin of the #' calendar. 
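#' @details As an illustration (a sketch, assuming a `"days since 1850-01-01"`
#'   definition on a standard calendar): `offsets2time(c(0.5, 31))` would
#'   produce rows for 1850-01-01 12:00:00 and 1850-02-01 00:00:00.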
#' @return A `data.frame` with columns for the timestamp elements and as #' many rows as there are offsets. offsets2time = function(offsets = NULL) { if(is.null(offsets) || (len <- length(offsets)) == 0L) return(data.frame(year = integer(), month = integer(), day = integer(), hour = integer(), minute = integer(), second = numeric(), tz = character(), offset = numeric())) if (self$unit <= 4L) { # Days, hours, minutes, seconds # First add time: convert to seconds first, then recompute time parts secs <- offsets * CFt$units$seconds[self$unit] + self$origin$hour * 3600 + self$origin$minute * 60 + self$origin$second days <- secs %/% 86400L # overflow days secs <- round(secs %% 86400L, 3L) # drop overflow days from time, round down to milli-seconds to avoid errors # Time elements for output hrs <- secs %/% 3600L mins <- (secs %% 3600L) %/% 60L secs <- secs %% 60L # Now add days using the calendar out <- if (any(days != 0L)) self$offset2date(days) else data.frame(year = rep(self$origin$year, len), month = rep(self$origin$month, len), day = rep(self$origin$day, len)) # Put it all back together again out$hour <- hrs out$minute <- mins out$second <- secs out$tz <- rep(self$timezone, len) } else { # Months, years out <- self$origin[rep(1L, len), ] if (self$unit == 5L) { # Offsets are months months <- out$month + offsets - 1L out$month <- months %% 12L + 1L out$year <- out$year + months %/% 12L } else { # Offsets are years out$year <- out$year + offsets } } out$offset <- offsets out } ), active = list( #' @field origin_date (read-only) Character string with the date of the #' calendar. origin_date = function(value) { if (missing(value)) { sprintf("%04d-%02d-%02d", self$origin$year, self$origin$month, self$origin$day) } }, #' @field origin_time (read-only) Character string with the time of the #' calendar. origin_time = function(value) { if (missing(value)) { .format_time(self$origin) } }, #' @field timezone (read-only) Character string with the time zone of the #' origin of the calendar. timezone = function(value) { if (missing(value)) self$origin$tz } ) ) CFtime/R/CFCalendarProleptic.R 0000644 0001762 0000144 00000015027 14757614144 015573 0 ustar ligges users #' @title Proleptic Gregorian CF calendar #' #' @description This class represents a standard CF calendar, but with the #' Gregorian calendar extended backwards to before the introduction of the #' Gregorian calendar. This calendar is compatible with the standard POSIXt #' calendar, but note that daylight savings time is not considered. #' #' This calendar includes dates 1582-10-14 to 1582-10-05 (the gap between the #' Gregorian and Julian calendars, which is observed by the standard #' calendar), and extends to years before the year 1, including year 0. #' #' @aliases CFCalendarProleptic #' @docType class CFCalendarProleptic <- R6::R6Class("CFCalendarProleptic", inherit = CFCalendar, private = list( # Rata Die, the number of days from the day before 0001-01-01 to # origin of this calendar. Used to convert offsets from the calendar origin # to the day before 0001-01-01 for arithmetic calculations. rd = 0L ), public = list( #' @description Create a new CF calendar. #' @param nm The name of the calendar. This must be "proleptic_gregorian". #' This argument is superfluous but maintained to be consistent with the #' initialization methods of the parent and sibling classes. #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. #' @return A new instance of this class. 
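#' @details Internally a "Rata Die" day number (days since the day before
#'   0001-01-01) is computed for the origin. As illustrative values derived
#'   from the arithmetic in `.gregorian_date2offset()`: an origin of
#'   1970-01-01 has Rata Die 719163 and 0001-01-01 has Rata Die 1.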
initialize = function(nm, definition) { super$initialize(nm, definition) private$rd <- .gregorian_date2offset(self$origin, self$leap_year(self$origin$year)) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. valid_days = function(ymd) { ymd$year & ymd$month >= 1L & ymd$month <= 12L & ymd$day >= 1L & ifelse(self$leap_year(ymd$year), ymd$day <= c(31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[ymd$month], ymd$day <= c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[ymd$month]) }, #' @description Determine the number of days in the month of the calendar. #' @param ymd `data.frame`, optional, with dates parsed into their parts. #' @return Integer vector indicating the number of days in each month for #' the dates supplied as argument `ymd`. If no dates are supplied, the #' number of days per month for the calendar as a vector of length 12, for #' a regular year without a leap day. month_days = function(ymd = NULL) { if (is.null(ymd)) return(c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)) ifelse(self$leap_year(ymd$year), c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month], c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month]) }, #' @description Indicate which years are leap years. #' @param yr Integer vector of years to test. #' @return Logical vector with the same length as argument `yr`. `NA` is #' returned where elements in argument `yr` are `NA`. leap_year = function(yr) { ((yr %% 4L == 0L) & (yr %% 100L > 0L)) | (yr %% 400L == 0L) }, #' @description Indicate if the time series described using this calendar #' can be safely converted to a standard date-time type (`POSIXct`, #' `POSIXlt`, `Date`). #' @param offsets The offsets from the CFtime instance. #' @return `TRUE`. POSIX_compatible = function(offsets) { TRUE # nocov }, #' @description Calculate difference in days between a `data.frame` of time #' parts and the origin. #' #' @param x `data.frame`. Dates to calculate the difference for. #' @return Integer vector of a length equal to the number of rows in #' argument `x` indicating the number of days between `x` and the `origin`, #' or `NA` for rows in `x` with `NA` values. date2offset = function(x) { .gregorian_date2offset(x, self$leap_year(x$year)) - private$rd }, #' @description Calculate date parts from day differences from the origin. This #' only deals with days as these are impacted by the calendar. #' Hour-minute-second timestamp parts are handled in [CFCalendar]. #' #' @param x Integer vector of days to add to the origin. #' @return A `data.frame` with columns 'year', 'month' and 'day' and as many #' rows as the length of vector `x`. offset2date = function(x) { .gregorian_offset2date(x + private$rd) } ) ) # The below functions use arithmetic offset calculation from date parts and # vice-versa. These functions are R-ified from pseudo-functions in Reingold & # Derschowitz, "Calendrical Calculations", 2018. #' Dates to offset, from function `fixed-from-gregorian()` #' #' @param x `data.frame` with columns "year", "month" and "date" #' @param leapyear Logical vector of the same length as `x` has rows indicating #' for each row in `x` if this is a leap year. 
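#' As a worked example (illustrative): for 2000-03-01 in a leap year the
#' expression evaluates to
#' `365*1999 + 1999%/%4 - 1999%/%100 + 1999%/%400 + (367*3 - 362)%/%12 - 1 + 1 = 730180`.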
#' @return Integer vector of offsets for the dates in `x`. The offsets are #' relative to the day before 0001-01-01. #' @noRd .gregorian_date2offset <- function(x, leapyear) { year1 <- x$year - 1L corr <- ifelse(x$month <= 2L, 0L, as.integer(leapyear) - 2L) 365L * year1 + year1 %/% 4L - year1 %/% 100L + year1 %/% 400L + (367L * x$month - 362L) %/% 12L + corr + x$day } #' Offsets to dates, from function `gregorian-from-fixed()` and support functions. #' #' @param x Integer vector of offsets. The offsets must be relative to the day #' before 0001-01-01. #' @return `data.frame` with date elements "year", "month" and "day". #' @noRd .gregorian_offset2date <- function(x) { d0 <- x - 1L n400 <- d0 %/% 146097L; d1 <- d0 %% 146097L n100 <- d1 %/% 36524L; d2 <- d1 %% 36524L n4 <- d2 %/% 1461L; d3 <- d2 %% 1461L n1 <- d3 %/% 365L yr <- 400L * n400 + 100L * n100 + 4L * n4 + n1 yr <- ifelse(n100 == 4L | n1 == 4L, yr, yr + 1L) leapyear <- ((yr %% 4L == 0L) & (yr %% 100L > 0L)) | (yr %% 400L == 0L) yr1 <- yr - 1L jan1 <- 365L * yr1 + yr1 %/% 4L - yr1 %/% 100L + yr1 %/% 400L + 1L prior_days <- x - jan1 + ifelse(x < jan1 + 59L + as.integer(leapyear), 0L, 2L - as.integer(leapyear)) mon <- (12L * prior_days + 373L) %/% 367L day <- x - .gregorian_date2offset(data.frame(year = yr, month = mon, day = 1), leapyear) + 1L data.frame(year = yr, month = mon, day = day) } CFtime/R/CFCalendar360.R 0000644 0001762 0000144 00000007545 14726371313 014142 0 ustar ligges users #' @title 360-day CF calendar #' #' @description This class represents a CF calendar of 360 days per year, evenly #' divided over 12 months of 30 days. This calendar is obviously not #' compatible with the standard POSIXt calendar. #' #' This calendar supports dates before year 1 and includes the year 0. #' #' @aliases CFCalendar360 #' @docType class CFCalendar360 <- R6::R6Class("CFCalendar360", inherit = CFCalendar, public = list( #' @description Create a new CF calendar. #' @param nm The name of the calendar. This must be "360_day". This argument #' is superfluous but maintained to be consistent with the initialization #' methods of the parent and sibling classes. #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. #' @return A new instance of this class. initialize = function(nm, definition) { super$initialize(nm, definition) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. valid_days = function(ymd) { ymd$year & ymd$month >= 1L & ymd$month <= 12L & ymd$day >= 1L & ymd$day <= 30L }, #' @description Determine the number of days in the month of the calendar. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return A vector indicating the number of days in each month for the #' dates supplied as argument `ymd`. If no dates are supplied, the number #' of days per month for the calendar as a vector of length 12. month_days = function(ymd = NULL) { if (is.null(ymd)) return(rep(30L, 12L)) res <- rep(30L, nrow(ymd)) res[which(is.na(ymd$year))] <- NA res }, #' @description Indicate which years are leap years. #' @param yr Integer vector of years to test. 
#' @return Logical vector with the same length as argument `yr`. Since this #' calendar does not use leap days, all values will be `FALSE`, or `NA` #' where argument `yr` is `NA`. leap_year = function(yr) { res <- rep(FALSE, length(yr)) res[which(is.na(yr))] <- NA res }, #' @description Calculate difference in days between a `data.frame` of time #' parts and the origin. #' #' @param x `data.frame`. Dates to calculate the difference for. #' #' @return Integer vector of a length equal to the number of rows in #' argument `x` indicating the number of days between `x` and the `origin`, #' or `NA` for rows in `x` with `NA` values. date2offset = function(x) { (x$year - self$origin$year) * 360L + (x$month - self$origin$month) * 30L + x$day - self$origin$day }, #' @description Calculate date parts from day differences from the origin. #' This only deals with days as these are impacted by the calendar. #' Hour-minute-second timestamp parts are handled in [CFCalendar]. #' #' @param x Integer vector of days to add to the origin. #' #' @return A `data.frame` with columns 'year', 'month' and 'day' and as many #' rows as the length of vector `x`. offset2date = function(x) { y <- self$origin$year + x %/% 360L m <- self$origin$month + (x %% 360L) %/% 30L d <- self$origin$day + x %% 30L over <- which(d > 30L) d[over] <- d[over] - 30L m[over] <- m[over] + 1L over <- which(m > 12L) m[over] <- m[over] - 12L y[over] <- y[over] + 1L data.frame(year = y, month = m, day = d, row.names = NULL) } ) ) CFtime/R/CFCalendarStandard.R 0000644 0001762 0000144 00000020040 14757612532 015360 0 ustar ligges users #' @title Standard CF calendar #' #' @description This class represents a standard calendar of 365 or 366 days per #' year. This calendar is compatible with the standard POSIXt calendar for #' periods after the introduction of the Gregorian calendar, 1582-10-15 #' 00:00:00. The calendar starts at 0001-01-01 00:00:00, e.g. the start of the #' Common Era. #' #' Note that this calendar, despite its name, is not the same as that used in #' ISO8601 or many computer systems for periods prior to the introduction of #' the Gregorian calendar. Use of the "proleptic_gregorian" calendar is #' recommended for periods before or straddling the introduction date, as that #' calendar is compatible with POSIXt on most OSes. #' #' @aliases CFCalendarStandard #' @docType class CFCalendarStandard <- R6::R6Class("CFCalendarStandard", inherit = CFCalendar, private = list( # Rata Die, the number of days from the day before 0001-01-01 to # origin of the Gregorian part of this calendar, if present. Used to # convert offsets from the Gregorian calendar origin to the day before # 0001-01-01 for arithmetic calculations. rd_greg = 0L, # Rata Die, the number of days from the day before 0001-01-01 to # origin of the Julian part of this calendar, if present. Used to convert # offsets from the Julian calendar origin to the day before 0001-01-01 #for arithmetic calculations. rd_juli = 0L, # The integer offset for 1582-10-15 00:00:00, when the Gregorian # calendar started, or 1582-10-05, when the gap between Julian and # Gregorian calendars started. The former is set when the calendar origin # is more recent, the latter when the origin is prior to the gap. gap = -1L ), public = list( #' @description Create a new CF calendar. #' @param nm The name of the calendar. This must be "standard" or #' "gregorian" (deprecated). #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. 
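#' @details For illustration (derived from the offset arithmetic in this
#'   class): 1582-10-04, the last Julian date, and 1582-10-15, the first
#'   Gregorian date, are exactly one day apart in this calendar, and the ten
#'   calendar dates in between are treated as invalid.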
#' @return A new instance of this class. initialize = function(nm, definition) { super$initialize(nm, definition) private$rd_greg <- .gregorian_date2offset(self$origin, self$leap_year(self$origin$year)) private$rd_juli <- .julian_date2offset(self$origin, self$leap_year(self$origin$year)) private$gap <- if (self$is_gregorian_date(self$origin)) .gregorian_date2offset(data.frame(year = 1582, month = 10, day = 15), self$leap_year(self$origin$year)) - private$rd_greg else .julian_date2offset(data.frame(year = 1582, month = 10, day = 5), self$leap_year(self$origin$year)) - private$rd_juli }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. valid_days = function(ymd) { ymd$year >= 1L & ymd$month >= 1L & ymd$month <= 12L & ymd$day >= 1L & ifelse(self$is_gregorian_date(ymd), # Gregorian calendar ifelse(self$leap_year(ymd$year), ymd$day <= c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month], ymd$day <= c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month]), # Julian calendar ifelse(ymd$year == 1582L & ymd$month == 10L & ymd$day > 4L, FALSE, # days 1582-10-05 - 1582-10-14 do not exist ifelse(ymd$year %% 4L == 0L, ymd$day <= c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month], ymd$day <= c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month]) ) ) }, #' @description Indicate which of the supplied dates are in the Gregorian #' part of the calendar, e.g. 1582-10-15 or after. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for days in the Gregorian part of the calendar and `FALSE` #' otherwise, or `NA` where the row in argument `ymd` has `NA` values. is_gregorian_date = function(ymd) { ymd$year > 1582L | (ymd$year == 1582L & (ymd$month > 10L | (ymd$month == 10L & ymd$day >= 15L))) }, #' @description Indicate if the time series described using this calendar #' can be safely converted to a standard date-time type (`POSIXct`, #' `POSIXlt`, `Date`). This is only the case if all offsets are for #' timestamps fall on or after the start of the Gregorian calendar, #' 1582-10-15 00:00:00. #' @param offsets The offsets from the CFtime instance. #' @return `TRUE`. POSIX_compatible = function(offsets) { all(offsets >= private$gap) }, #' @description Determine the number of days in the month of the calendar. #' @param ymd `data.frame`, optional, with dates parsed into their parts. #' @return A vector indicating the number of days in each month for the #' dates supplied as argument `ymd`. If no dates are supplied, the number #' of days per month for the calendar as a vector of length 12, for a #' regular year without a leap day. month_days = function(ymd = NULL) { if (is.null(ymd)) return(c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)) ifelse(self$leap_year(ymd$year), c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month], c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month]) }, #' @description Indicate which years are leap years. 
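#' Years up to and including 1582 follow the Julian rule (every fourth year
#' is a leap year), later years follow the Gregorian rule; for example, 1500
#' is a leap year in this calendar while 1900 is not.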
#' @param yr Integer vector of years to test. #' @return Logical vector with the same length as argument `yr`. `NA` is #' returned where elements in argument `yr` are `NA`. leap_year = function(yr) { ifelse(yr <= 1582L, yr %% 4L == 0L, ((yr %% 4L == 0L) & (yr %% 100L > 0L)) | (yr %% 400L == 0L) ) }, #' @description Calculate difference in days between a `data.frame` of time #' parts and the origin. #' #' @param x `data.frame`. Dates to calculate the difference for. #' @return Integer vector of a length equal to the number of rows in #' argument `x` indicating the number of days between `x` and the origin #' of the calendar, or `NA` for rows in `x` with `NA` values. date2offset = function(x) { leap <- self$leap_year(x$year) ifelse(self$is_gregorian_date(x), .gregorian_date2offset(x, leap), .julian_date2offset(x, leap) ) - if (private$gap > 0L) private$rd_juli else private$rd_greg }, #' @description Calculate date parts from day differences from the origin. This #' only deals with days as these are impacted by the calendar. #' Hour-minute-second timestamp parts are handled in [CFCalendar]. #' #' @param x Integer vector of days to add to the origin. #' @return A `data.frame` with columns 'year', 'month' and 'day' and as many #' rows as the length of vector `x`. offset2date = function(x) { rd <- if (private$gap > 0L) private$rd_juli else private$rd_greg len <- length(x) gndx <- x >= private$gap & !is.na(x) if (any(gndx)) greg <- .gregorian_offset2date(x[gndx] + rd) else greg <- data.frame(year = integer(), month = integer(), day = integer()) jndx <- x < private$gap & !is.na(x) if (any(jndx)) juli <- .julian_offset2date(x[jndx] + rd) else juli <- data.frame(year = integer(), month = integer(), day = integer()) yr <- mon <- day <- rep(NA_integer_, len) yr[gndx] <- greg$year; yr[jndx] <- juli$year mon[gndx] <- greg$month; mon[jndx] <- juli$month day[gndx] <- greg$day; day[jndx] <- juli$day data.frame(year = yr, month = mon, day = day) } ) ) CFtime/R/CFCalendar365.R 0000644 0001762 0000144 00000010275 14731346614 014143 0 ustar ligges users #' @title 365-day CF calendar #' #' @description This class represents a CF calendar of 365 days per year, having #' no leap days in any year. This calendar is not compatible with the standard #' POSIXt calendar. #' #' This calendar supports dates before year 1 and includes the year 0. #' #' @aliases CFCalendar365 #' @docType class CFCalendar365 <- R6::R6Class("CFCalendar365", inherit = CFCalendar, private = list( # Rata Die, the number of days from the day before 0001-01-01 to # origin of this calendar. Used to convert offsets from the calendar origin # to the day before 0001-01-01 for arithmetic calculations. rd = 0L ), public = list( #' @description Create a new CF calendar of 365 days per year. #' @param nm The name of the calendar. This must be "365_day" or "noleap". #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. #' @return A new instance of this class. initialize = function(nm, definition) { super$initialize(nm, definition) private$rd <- self$date2offset(self$origin) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. 
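#' @details For example (illustrative): a parsed row for 2000-02-29 is
#'   invalid in this calendar, since February always has 28 days.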
valid_days = function(ymd) { ymd$year & ymd$month >= 1L & ymd$month <= 12L & ymd$day >= 1L & ymd$day <= c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month] }, #' @description Determine the number of days in the month of the calendar. #' @param ymd `data.frame`, optional, with dates parsed into their parts. #' @return A vector indicating the number of days in each month for the #' dates supplied as argument `ymd`. If no dates are supplied, the number #' of days per month for the calendar as a vector of length 12. month_days = function(ymd = NULL) { if (is.null(ymd)) return(c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)) res <- c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month] res[which(is.na(ymd$year))] <- NA res }, #' @description Indicate which years are leap years. #' @param yr Integer vector of years to test. #' @return Logical vector with the same length as argument `yr`. Since this #' calendar does not use leap days, all values will be `FALSE`, or `NA` #' where argument `yr` is `NA`. leap_year = function(yr) { res <- rep(FALSE, length(yr)) res[which(is.na(yr))] <- NA res }, #' @description Calculate difference in days between a `data.frame` of time #' parts and the origin. #' #' @param x `data.frame`. Dates to calculate the difference for. #' #' @return Integer vector of a length equal to the number of rows in #' argument `x` indicating the number of days between `x` and the `origin`, #' or `NA` for rows in `x` with `NA` values. date2offset = function(x) { corr <- ifelse(x$month <= 2L, 0L, -2L) 365L * (x$year - 1L) + (367L * x$month - 362L) %/% 12L + corr + x$day - private$rd }, #' @description Calculate date parts from day differences from the origin. This #' only deals with days as these are impacted by the calendar. #' Hour-minute-second timestamp parts are handled in [CFCalendar]. #' #' @param x Integer vector of days to add to the origin. #' @return A `data.frame` with columns 'year', 'month' and 'day' and as many #' rows as the length of vector `x`. offset2date = function(x) { d0 <- x - 1L + private$rd # d0 is offset relative to year 0, 0-based yr <- d0 %/% 365L + 1L # full years d1 <- d0 %% 365L # remaining days corr <- ifelse(d1 < 59L, 0L, 2L) # correct for days past February mon <- (12L * (d1 + corr) + 373L) %/% 367L day <- d1 - (367L * mon - 362L) %/% 12L + corr + 1L data.frame(year = yr, month = mon, day = day) } ) ) CFtime/R/CFCalendarJulian.R 0000644 0001762 0000144 00000013316 14734517024 015045 0 ustar ligges users #' @title Julian CF calendar #' #' @description This class represents a Julian calendar of 365 days per year, #' with every fourth year being a leap year of 366 days. The months and the #' year align with the standard calendar. This calendar is not compatible with #' the standard POSIXt calendar. #' #' This calendar starts on 1 January of year 1: 0001-01-01 00:00:00. Any dates #' before this will generate an error. #' #' @aliases CFCalendarJulian #' @docType class CFCalendarJulian <- R6::R6Class("CFCalendarJulian", inherit = CFCalendar, private = list( # Rata Die, the number of days from the day before 0001-01-01 to # origin of this calendar. Used to convert offsets from the calendar origin # to the day before 0001-01-01 for arithmetic calculations. rd = 0L ), public = list( #' @description Create a new CF calendar. #' @param nm The name of the calendar. This must be "julian". 
This argument #' is superfluous but maintained to be consistent with the initialization #' methods of the parent and sibling classes. #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. #' @return A new instance of this class. initialize = function(nm, definition) { super$initialize(nm, definition) private$rd <- .julian_date2offset(self$origin, self$leap_year(self$origin$year)) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. valid_days = function(ymd) { ymd$year >= 1L & ymd$month >= 1L & ymd$month <= 12L & ymd$day >= 1L & ifelse(self$leap_year(ymd$year), ymd$day <= c(31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[ymd$month], ymd$day <= c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[ymd$month]) }, #' @description Determine the number of days in the month of the calendar. #' @param ymd `data.frame`, optional, with dates parsed into their parts. #' @return A vector indicating the number of days in each month for the #' dates supplied as argument `ymd`. If no dates are supplied, the number #' of days per month for the calendar as a vector of length 12, for a #' regular year without a leap day. month_days = function(ymd = NULL) { if (is.null(ymd)) return(c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) ifelse(self$leap_year(ymd$year), c(31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[ymd$month], c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[ymd$month]) }, #' @description Indicate which years are leap years. #' @param yr Integer vector of years to test. #' @return Logical vector with the same length as argument `yr`. `NA` is #' returned where elements in argument `yr` are `NA`. leap_year = function(yr) { yr %% 4L == 0L }, #' @description Calculate difference in days between a `data.frame` of time #' parts and the origin. #' #' @param x `data.frame`. Dates to calculate the difference for. #' @return Integer vector of a length equal to the number of rows in #' argument `x` indicating the number of days between `x` and the origin #' of the calendar, or `NA` for rows in `x` with `NA` values. date2offset = function(x) { .julian_date2offset(x, self$leap_year(x$year)) - private$rd }, #' @description Calculate date parts from day differences from the origin. This #' only deals with days as these are impacted by the calendar. #' Hour-minute-second timestamp parts are handled in [CFCalendar]. #' #' @param x Integer vector of days to add to the origin. #' @return A `data.frame` with columns 'year', 'month' and 'day' and as many #' rows as the length of vector `x`. offset2date = function(x) { .julian_offset2date(x + private$rd) } ) ) # ============================================================================== # The below functions use arithmetic offset calculation from date parts and # vice-versa. These functions are R-ified from pseudo-functions in Reingold & # Derschowitz, "Calendrical Calculations", 2018. #' Dates to offset, from function `fixed-from-julian()` #' #' @param x `data.frame` with columns "year", "month" and "date" #' @param leapyear Logical vector of the same length as `x` has rows indicating #' for each row in `x` if this is a leap year. 
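#' As a worked example (illustrative): for Julian 0001-01-01 the expression
#' evaluates to `0 + 0 + (367*1 - 362)%/%12 + 0 + 1 - 2 = -1`, i.e. Julian
#' 0001-01-01 falls two days before Gregorian 0001-01-01 (Rata Die 1).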
#' @return Integer vector of offsets for the dates in `x`. The offsets are #' relative to the day before 0001-01-01. #' @noRd .julian_date2offset <- function(x, leapyear) { year1 <- x$year - 1L corr <- ifelse(x$month <= 2L, 0L, as.integer(leapyear) - 2L) 365L * year1 + year1 %/% 4L + (367L * x$month - 362L) %/% 12L + corr + x$day - 2L } #' Offsets to dates, from function `julian-from-fixed()` and support functions. #' #' @param x Integer vector of offsets. The offsets must be relative to the day #' before 0001-01-01. #' @return `data.frame` with date elements "year", "month" and "day". #' @noRd .julian_offset2date <- function(x) { yr <- (4 * (x + 1L) + 1464L) %/% 1461L leapyear <- yr %% 4L == 0L yr1 <- yr - 1L jan1 <- -2L + 365L * yr1 + yr1 %/% 4L + 1L prior_days <- x - jan1 + ifelse(x < jan1 + 59L + as.integer(leapyear), 0L, 2L - as.integer(leapyear)) mon <- (12L * prior_days + 373L) %/% 367L day <- x - .julian_date2offset(data.frame(year = yr, month = mon, day = 1), leapyear) + 1L data.frame(year = yr, month = mon, day = day) } CFtime/R/api.R 0000644 0001762 0000144 00000113021 14760146635 012530 0 ustar ligges users #' Create a CFTime object #' #' This function creates an instance of the [CFTime] class. The arguments to the #' call are typically read from a CF-compliant data file with climatological #' observations or climate projections. Specification of arguments can also be #' made manually in a variety of combinations. #' #' @param definition A character string describing the time coordinate. #' @param calendar A character string describing the calendar to use with the #' time dimension definition string. Default value is "standard". #' @param offsets Numeric or character vector, optional. When numeric, a vector #' of offsets from the origin in the time series. When a character vector of #' length 2 or more, timestamps in ISO8601 or UDUNITS format. When a character #' string, a timestamp in ISO8601 or UDUNITS format and then a time series #' will be generated with a separation between steps equal to the unit of #' measure in the definition, inclusive of the definition timestamp. The unit #' of measure of the offsets is defined by the time series definition. #' @returns An instance of the `CFTime` class. #' @export #' @name CFtime-function #' @examples #' CFtime("days since 1850-01-01", "julian", 0:364) #' #' CFtime("hours since 2023-01-01", "360_day", "2023-01-30T23:00") CFtime <- function(definition, calendar = "standard", offsets = NULL) { CFTime$new(definition, calendar, offsets) } # ============================================================================== # Functions to access CFTime properties and methods #' @aliases properties #' @title Properties of a CFTime object #' #' @description These functions return the properties of an instance of the #' [CFTime] class. The properties are all read-only, but offsets can be added #' using the `+` operator. #' #' @param t An instance of `CFTime`. #' #' @returns `calendar()` and `unit()` return a character string. #' `origin()` returns a data frame of timestamp elements with a single row #' of data. `timezone()` returns the calendar time zone as a character #' string. `offsets()` returns a vector of offsets or `NULL` if no offsets #' have been set. #' #' @examples #' t <- CFtime("days since 1850-01-01", "julian", 0:364) #' definition(t) #' calendar(t) #' unit(t) #' timezone(t) #' origin(t) #' offsets(t) #' resolution(t) #' @describeIn properties The definition string of the `CFTime` instance. 
#' @export definition <- function(t) t$cal$definition #' @describeIn properties The calendar of the `CFTime` instance. #' @export calendar <- function(t) t$cal$name #' @describeIn properties The unit of the `CFTime` instance. #' @export unit <- function(t) t$unit #' @describeIn properties The origin of the `CFTime` instance in timestamp elements. #' @export origin <- function(t) t$cal$origin #' @describeIn properties The time zone of the calendar of the `CFTime` instance as a character string. #' @export timezone <- function(t) t$cal$timezone #' @describeIn properties The offsets of the `CFTime` instance as a numeric vector. #' @export offsets <- function(t) t$offsets #' @describeIn properties The average separation between the offsets in the `CFTime` instance. #' @export resolution <- function(t) t$resolution #' Bounds of the time offsets #' #' CF-compliant netCDF files store time information as a single offset value for #' each step along the dimension, typically centered on the valid interval of #' the data (e.g. 12-noon for day data). Optionally, the lower and upper values #' of the valid interval are stored in a so-called "bounds" variable, as an #' array with two rows (lower and higher value) and a column for each offset. #' With function `bounds()<-` those bounds can be set for a `CFTime` instance. #' The bounds can be retrieved with the `bounds()` function. #' #' @param x A `CFTime` instance. #' @param format Optional. A single string with format specifiers, see #' [CFtime::format()] for details. #' @return If bounds have been set, an array of bounds values with dimensions #' (2, length(offsets)). The first row gives the lower bound, the second row #' the upper bound, with each column representing an offset of `x`. If the #' `format` argument is specified, the bounds values are returned as strings #' according to the format. `NULL` when no bounds have been set. #' @aliases bounds #' @export #' @examples #' t <- CFtime("days since 2024-01-01", "standard", seq(0.5, by = 1, length.out = 366)) #' as_timestamp(t)[1:3] #' bounds(t) <- rbind(0:365, 1:366) #' bounds(t)[, 1:3] #' bounds(t, "%d-%b-%Y")[, 1:3] bounds <- function(x, format) { x$get_bounds(format) } #' @rdname bounds #' @param value A `matrix` (or `array`) with dimensions (2, length(offsets)) #' giving the lower (first row) and higher (second row) bounds of each offset #' (this is the format that the CF Metadata Conventions uses for storage in #' netCDF files). Use `FALSE` to unset any previously set bounds, `TRUE` to #' set regular bounds at mid-points between the offsets (which must be regular #' as well). #' @export `bounds<-` <- function(x, value) { x$set_bounds(value) x } #' The length of the offsets contained in the `CFTime` instance. #' #' @param x The `CFTime` instance whose length will be returned #' #' @return The number of offsets in the specified `CFTime` instance. #' @export #' #' @examples #' t <- CFtime("days since 1850-01-01", "julian", 0:364) #' length(t) length.CFTime <- function(x) base::length(x$offsets) #' Return the timestamps contained in the `CFTime` instance. #' #' @param x The `CFTime` instance whose timestamps will be returned. #' @param ... Ignored. #' #' @return The timestamps in the specified `CFTime` instance. #' @export #' #' @examples #' t <- CFtime("days since 1850-01-01", "julian", 0:364) #' as.character(t) as.character.CFTime <- function(x, ...) { x$as_timestamp() } #' Create a factor for a `CFTime` instance #' #' Method for [base::cut()] applied to [CFTime] objects. 
#' #' When `breaks` is one of `"year", "season", "quarter", "month", "dekad", #' "day"` a factor is generated like by [CFfactor()]. #' #' When `breaks` is a vector of character timestamps a factor is produced with a #' level for every interval between timestamps. The last timestamp, therefore, #' is only used to close the interval started by the pen-ultimate timestamp - #' use a distant timestamp (e.g. `range(x)[2]`) to ensure that all offsets to #' the end of the CFTime time series are included, if so desired. The last #' timestamp will become the upper bound in the `CFTime` instance that is #' returned as an attribute to this function so a sensible value for the last #' timestamp is advisable. #' #' This method works similar to [base::cut.POSIXt()] but there are some #' differences in the arguments: for `breaks` the set of options is different #' and no preceding integer is allowed, `labels` are always assigned using #' values of `breaks`, and the interval is always left-closed. #' #' @param x An instance of `CFTime`. #' @param breaks A character string of a factor period (see [CFfactor()] for a #' description), or a character vector of timestamps that conform to the #' calendar of `x`, with a length of at least 2. Timestamps must be given in #' ISO8601 format, e.g. "2024-04-10 21:31:43". #' @param ... Ignored. #' @returns A factor with levels according to the `breaks` argument, with #' attributes 'period', 'era' and 'CFTime'. When `breaks` is a factor #' period, attribute 'period' has that value, otherwise it is '"day"'. When #' `breaks` is a character vector of timestamps, attribute 'CFTime' holds an #' instance of `CFTime` that has the same definition as `x`, but with (ordered) #' offsets generated from the `breaks`. Attribute 'era' is always -1. #' @aliases cut #' @seealso [CFfactor()] produces a factor for several fixed periods, including #' for eras. #' @export #' @examples #' x <- CFtime("days since 2021-01-01", "365_day", 0:729) #' breaks <- c("2022-02-01", "2021-12-01", "2023-01-01") #' cut(x, breaks) cut.CFTime <- function (x, breaks, ...) { if (!inherits(x, "CFTime")) stop("Argument 'x' must be a CFTime instance", call. = FALSE) x$cut(breaks) } #' Find the index of timestamps in the time series #' #' Find the index in the time series for each timestamp given in argument `x`. #' Values of `x` that are before the earliest value in `y` will be returned as #' `0`; values of `x` that are after the latest values in `y` will be returned #' as `.Machine$integer.max`. Alternatively, when `x` is a numeric vector of #' index values, return the valid indices of the same vector, with the side #' effect being the attribute "CFTime" associated with the result. #' #' Timestamps can be provided as vectors of character strings, `POSIXct` or #' `Date.` #' #' Matching also returns index values for timestamps that fall between two #' elements of the time series - this can lead to surprising results when time #' series elements are positioned in the middle of an interval (as the CF #' Metadata Conventions instruct us to "reasonably assume"): a time series of #' days in January would be encoded in a netCDF file as #' `c("2024-01-01 12:00:00", "2024-01-02 12:00:00", "2024-01-03 12:00:00", ...)` #' so `x <- c("2024-01-01", "2024-01-02", "2024-01-03")` would result in #' `(NA, 1, 2)` (or `(NA, 1.5, 2.5)` with `method = "linear"`) because the date #' values in `x` are at midnight. 
This situation is easily avoided by ensuring #' that `y` has bounds set (use `bounds(y) <- TRUE` as a proximate solution if #' bounds are not stored in the netCDF file). See the Examples. #' #' If bounds are set, the indices are taken from those bounds. Returned indices #' may fall in between bounds if the latter are not contiguous, with the #' exception of the extreme values in `x`. #' #' Values of `x` that are not valid timestamps according to the calendar of `y` #' will be returned as `NA`. #' #' `x` can also be a numeric vector of index values, in which case the valid #' values in `x` are returned. If negative values are passed, the positive #' counterparts will be excluded and then the remainder returned. Positive and #' negative values may not be mixed. Using a numeric vector has the side effect #' that the result has the attribute "CFTime" describing the temporal dimension #' of the slice. If index values outside of the range of `y` (`1:length(y)`) are #' provided, an error will be thrown. #' #' @param x Vector of `character`, `POSIXt` or `Date` values to find indices #' for, or a numeric vector. #' @param y [CFTime] instance. #' @param method Single value of "constant" or "linear". If `"constant"` or when #' bounds are set on argument `y`, return the index value for each match. If #' `"linear"`, return the index value with any fractional value. #' #' @returns A numeric vector giving indices into the "time" dimension of the #' data set associated with `y` for the values of `x`. If there is at least 1 #' valid index, then attribute "CFTime" contains an instance of `CFTime` that #' describes the dimension of filtering the data set associated with `y` with #' the result of this function, excluding any `NA`, `0` and #' `.Machine$integer.max` values. #' @export #' #' @examples #' cf <- CFtime("days since 2020-01-01", "360_day", 1440:1799 + 0.5) #' as_timestamp(cf)[1:3] #' x <- c("2024-01-01", "2024-01-02", "2024-01-03") #' indexOf(x, cf) #' indexOf(x, cf, method = "linear") #' #' bounds(cf) <- TRUE #' indexOf(x, cf) #' #' # Non-existent calendar day in a `360_day` calendar #' x <- c("2024-03-30", "2024-03-31", "2024-04-01") #' indexOf(x, cf) #' #' # Numeric x #' indexOf(c(29, 30, 31), cf) indexOf <- function(x, y, method = "constant") { y$indexOf(x, method) } #' Extreme time series values #' #' Character representation of the extreme values in the time series. #' #' @param x An instance of the [CFTime] class. #' @param format A character string with format specifiers, optional. If it is #' missing or an empty string, the most economical ISO8601 format is chosen: #' "date" when no time information is present in `x`, "timestamp" otherwise. #' Otherwise a suitable format specifier can be provided. #' @param bounds Logical to indicate if the extremes from the bounds should be #' used, if set. Defaults to `FALSE`. #' @param ... Ignored. #' @param na.rm Ignored. #' @return Vector of two character representations of the extremes of the time #' series. #' @export #' @examples #' cf <- CFtime("days since 1850-01-01", "julian", 0:364) #' range(cf) #' range(cf, "%Y-%b-%e") range.CFTime <- function(x, format = "", bounds = FALSE, ..., na.rm = FALSE) { x$range(format, bounds) } #' Indicates if the time series is complete #' #' This function indicates if the time series is complete, meaning that the time #' steps are equally spaced and there are thus no gaps in the time series. 
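#'
#' For instance (illustrative): a daily series with offsets `0:364` is
#' complete, while the same series with one offset removed
#' (`c(0:99, 101:364)`) is not.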
#' #' This function gives exact results for time series where the nominal #' *unit of separation* between observations in the time series is exact in #' terms of the calendar unit. As an example, for a calendar unit of "days" where the #' observations are spaced a fixed number of days apart the result is exact, but #' if the same calendar unit is used for data that is on a monthly basis, the #' *assessment* is approximate because the number of days per month is variable #' and dependent on the calendar (the exception being the `360_day` calendar, #' where the assessment is exact). The *result* is still correct in most cases #' (including all CF-compliant data sets that the developers have seen) although #' there may be esoteric constructions of CFTime and offsets that trip up this #' implementation. #' #' @param x An instance of the [CFTime] class. #' @returns logical. `TRUE` if the time series is complete, with no gaps; #' `FALSE` otherwise. If no offsets have been added to the `CFTime` instance, #' `NA` is returned. #' @export #' @examples #' t <- CFtime("days since 1850-01-01", "julian", 0:364) #' is_complete(t) is_complete <- function(x) { if (!inherits(x, "CFTime")) stop("Argument must be an instance of `CFTime`", call. = FALSE) x$equidistant() } #' Which time steps fall within two extreme values #' #' Given two extreme character timestamps, return a logical vector of a length #' equal to the number of time steps in the [CFTime] instance with values `TRUE` #' for those time steps that fall between the two extreme values, `FALSE` #' otherwise. This can be used to select slices from the time series in reading #' or analysing data. #' #' If bounds were set these will be preserved. #' #' @param x The `CFTime` instance to operate on. #' @param extremes Character vector of two timestamps that represent the #' extremes of the time period of interest. The timestamps must be in #' increasing order. The timestamps need not fall in the range of the time #' steps in argument `x. #' @param rightmost.closed Is the larger extreme value included in the result? #' Default is `FALSE`. #' @returns A logical vector with a length equal to the number of time steps in #' `x` with values `TRUE` for those time steps that fall between the two #' extreme values, `FALSE` otherwise. The earlier timestamp is included, the #' later timestamp is excluded. A specification of `c("2022-01-01", "2023-01-01")` #' will thus include all time steps that fall in the year 2022. #' @export #' @examples #' t <- CFtime("hours since 2023-01-01 00:00:00", "standard", 0:23) #' slice(t, c("2022-12-01", "2023-01-01 03:00")) slice <- function(x, extremes, rightmost.closed = FALSE) { if (!inherits(x, "CFTime")) stop("First argument must be an instance of `CFTime`", call. = FALSE) x$slice(extremes, rightmost.closed) } #' Which time steps fall within two extreme values #' #' Avoid using this function, use [slice()] instead. This function will be #' deprecated in the near future. #' #' @param x,extremes,rightmost.closed See `slice()`. #' @returns See `slice()`. #' @export #' @examples #' t <- CFtime("hours since 2023-01-01 00:00:00", "standard", 0:23) #' slab(t, c("2022-12-01", "2023-01-01 03:00")) slab <- function(x, extremes, rightmost.closed = FALSE) { # FIXME: Deprecate 2025-06 x$slice(extremes, rightmost.closed) } #' Equivalence of CFTime objects #' #' This operator can be used to test if two [CFTime] objects represent the same #' CF-convention time coordinates. 
Two `CFTime` objects are considered equivalent #' if they have an equivalent calendar and the same offsets. #' #' @param e1,e2 Instances of the `CFTime` class. #' @returns `TRUE` if the `CFTime` objects are equivalent, `FALSE` otherwise. #' @export #' @aliases CFtime-equivalent #' @examples #' e1 <- CFtime("days since 1850-01-01", "gregorian", 0:364) #' e2 <- CFtime("days since 1850-01-01 00:00:00", "standard", 0:364) #' e1 == e2 "==.CFTime" <- function(e1, e2) e1$cal$is_equivalent(e2$cal) && length(e1$offsets) == length(e2$offsets) && all(e1$offsets == e2$offsets) #' Extend a CFTime object #' #' A [CFTime] instance can be extended with this operator, using values from #' another `CFTime` instance, or a vector of numeric offsets or character #' timestamps. If the values come from another `CFTime` instance, the calendars #' of the two instances must be compatible If the calendars of the `CFTime` #' instances are not compatible, an error is thrown. #' #' The resulting `CFTime` instance will have the offsets of the original #' `CFTime` instance, appended with offsets from argument `e2` in the order that #' they are specified. If the new sequence of offsets is not monotonically #' increasing a warning is generated (the COARDS metadata convention requires #' offsets to be monotonically increasing). #' #' There is no reordering or removal of duplicates. This is because the time #' series are usually associated with a data set and the correspondence between #' the data in the files and the `CFTime` instance is thus preserved. When #' merging the data sets described by this time series, the order must be #' identical to the merging here. #' #' Note that when adding multiple vectors of offsets to a `CFTime` instance, it #' is more efficient to first concatenate the vectors and then do a final #' addition to the `CFTime` instance. So avoid #' `CFtime(definition, calendar, e1) + CFtime(definition, calendar, e2) + CFtime(definition, calendar, e3) + ...` #' but rather do `CFtime(definition, calendar) + c(e1, e2, e3, ...)`. It is the #' responsibility of the operator to ensure that the offsets of the different #' data sets are in reference to the same calendar. #' #' Note also that `RNetCDF` and `ncdf4` packages both return the values of the #' "time" dimension as a 1-dimensional array. You have to `dim(time_values) <- #' NULL` to de-class the array to a vector before adding offsets to an existing #' `CFtime` instance. #' #' Any bounds that were set will be removed. Use [bounds()] to retrieve the #' bounds of the individual `CFTime` instances and then set them again after #' merging the two instances. #' #' @param e1 Instance of the `CFTime` class. #' @param e2 Instance of the `CFTime` class with a calendar compatible with that #' of argument `e1`, or a numeric vector with offsets from the origin of #' argument `e1`, or a vector of `character` timestamps in ISO8601 or UDUNITS #' format. #' @returns A `CFTime` object with the offsets of argument `e1` extended by the #' values from argument `e2`. #' @export #' @aliases CFtime-merge #' @examples #' e1 <- CFtime("days since 1850-01-01", "gregorian", 0:364) #' e2 <- CFtime("days since 1850-01-01 00:00:00", "standard", 365:729) #' e1 + e2 "+.CFTime" <- function(e1, e2) { if (inherits(e2, "CFTime")) { if (!e1$cal$is_compatible(e2$cal)) stop("Calendars not compatible", call. 
= FALSE) # nocov if (all(e1$cal$origin[1:6] == e2$cal$origin[1:6])) CFTime$new(e1$cal$definition, e1$cal$name, c(e1$offsets, e2$offsets)) else { diff <- e1$cal$parse(paste(e2$cal$origin_date, e2$cal$origin_time))$offset CFTime$new(e1$cal$definition, e1$cal$name, c(e1$offsets, e2$offsets + diff)) } } else if (is.numeric(e2) && .validOffsets(e2)) { CFTime$new(e1$cal$definition, e1$cal$name, c(e1$offsets, e2)) } else { time <- e1$cal$parse(e2) if (anyNA(time$year)) stop("Argument `e2` contains invalid timestamps", call. = FALSE) # nocov CFTime$new(e1$cal$definition, e1$cal$name, c(e1$offsets, time$offset)) } } # ============================================================================== # Factors and coverage #' Create a factor from the offsets in a `CFTime` instance #' #' With this function a factor can be generated for the time series, or a part #' thereof, contained in the [CFTime] instance. This is specifically interesting #' for creating factors from the date part of the time series that aggregate the #' time series into longer time periods (such as month) that can then be used to #' process daily CF data sets using, for instance, `tapply()`. #' #' The factor will respect the calendar that the time series is built on. For #' `period`s longer than a day this will result in a factor where the calendar #' is no longer relevant (because calendars impact days, not dekads, months, #' quarters, seasons or years). #' #' The factor will be generated in the order of the offsets of the `CFTime` #' instance. While typical CF-compliant data sources use ordered time series #' there is, however, no guarantee that the factor is ordered as multiple #' `CFTime` objects may have been merged out of order. For most processing with #' a factor the ordering is of no concern. #' #' If the `era` parameter is specified, either as a vector of years to include #' in the factor, or as a list of such vectors, the factor will only consider #' those values in the time series that fall within the list of years, inclusive #' of boundary values. Other values in the factor will be set to `NA`. The years #' need not be contiguous, within a single vector or among the list items, or in #' order. #' #' The following periods are supported by this function: #' #' \itemize{ #' \item `year`, the year of each offset is returned as "YYYY". #' \item `season`, the meteorological season of each offset is returned as #' "Sx", with x being 1-4, preceded by "YYYY" if no `era` is #' specified. Note that December dates are labeled as belonging to the #' subsequent year, so the date "2020-12-01" yields "2021S1". This implies #' that for standard CMIP files having one or more full years of data the #' first season will have data for the first two months (January and #' February), while the final season will have only a single month of data #' (December). #' \item `quarter`, the calendar quarter of each offset is returned as "Qx", #' with x being 1-4, preceded by "YYYY" if no `era` is specified. #' \item `month`, the month of each offset is returned as "01" to #' "12", preceded by "YYYY-" if no `era` is specified. This is the default #' period. #' \item `dekad`, ten-day periods are returned as #' "Dxx", where xx runs from "01" to "36", preceded by "YYYY" if no `era` #' is specified. Each month is subdivided into dekads as follows: 1- days 01 - #' 10; 2- days 11 - 20; 3- remainder of the month. #' \item `day`, the month and day of each offset are returned as "MM-DD", #' preceded by "YYYY-" if no `era` is specified.
#' } #' #' It is not possible to create a factor for a period that is shorter than the #' temporal resolution of the source data set from which the `t` argument #' derives. As an example, if the source data set has monthly data, a dekad or #' day factor cannot be created. #' #' Creating factors for other periods is not supported by this function. Factors #' based on the timestamp information and not dependent on the calendar can #' trivially be constructed from the output of the [as_timestamp()] function. #' #' For non-era factors the attribute 'CFTime' of the result contains a `CFTime` #' instance that is valid for the result of applying the factor to a data set #' that the `t` argument is associated with. In other words, if `CFTime` #' instance 'At' describes the temporal dimension of data set 'A' and a factor #' 'Af' is generated like `Af <- CFfactor(At)`, then `Bt <- attr(Af, "CFTime")` #' describes the temporal dimension of the result of, say, #' `B <- apply(A, 1:2, tapply, Af, FUN)`. The 'CFTime' attribute is `NULL` for #' era factors. #' #' @param t An instance of the `CFTime` class whose offsets will be used to #' construct the factor. #' @param period character. A character string with one of the values "year", #' "season", "quarter", "month" (the default), "dekad" or "day". #' @param era numeric or list, optional. Vector of years for which to #' construct the factor, or a list whose elements are each a vector of years. #' If `era` is not specified, the factor will use the entire time series for #' the factor. #' #' @returns If `era` is a single vector or not specified, a factor with a #' length equal to the number of offsets in `t`. If `era` is a list, a list #' with the same number of elements and names as `era`, each containing a #' factor. Elements in the factor will be set to `NA` for time series values #' outside of the range of specified years. #' #' The factor, or factors in the list, have attributes 'period', 'era' and #' 'CFTime'. Attribute 'period' holds the value of the `period` argument. #' Attribute 'era' indicates the number of years that are included in the #' era, or -1 if no `era` is provided. Attribute 'CFTime' holds an #' instance of `CFTime` that has the same definition as `t`, but with offsets #' corresponding to the mid-point of non-era factor levels; if the `era` #' argument is specified, attribute 'CFTime' is `NULL`. #' @seealso [cut()] creates a non-era factor for arbitrary cut points. #' @export #' #' @examples #' t <- CFtime("days since 1949-12-01", "360_day", 19830:54029) #' #' # Create a dekad factor for the whole time series #' f <- CFfactor(t, "dekad") #' #' # Create three monthly factors for early, mid and late 21st century eras #' ep <- CFfactor(t, era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) CFfactor <- function(t, period = "month", era = NULL) { if (!(inherits(t, "CFTime"))) stop("First argument to CFfactor() must be an instance of the `CFTime` class", call. = FALSE) # nocov t$factor(period, era) } #' Number of base time units in each factor level #' #' Given a factor as returned by [CFfactor()] and the [CFTime] instance from #' which the factor was derived, this function will return a numeric vector with #' the number of time units in each level of the factor. #' #' The result of this function is useful to convert between absolute and #' relative values. Climate change anomalies, for instance, are usually computed #' by differencing average values between a future period and a baseline period. 
#' Going from average values back to absolute values for an aggregate period #' (which is typical for temperature and precipitation, among other variables) #' is easily done with the result of this function, without having to consider #' the specifics of the calendar of the data set. #' #' If the factor `f` is for an era (e.g. spanning multiple years and the #' levels do not indicate the specific year), then the result will indicate the #' number of time units of the period in a regular single year. In other words, #' for an era of 2041-2060 and a monthly factor on a standard calendar with a #' `days` unit, the result will be `c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)`. #' Leap days are thus only considered for the `366_day` and `all_leap` calendars. #' #' Note that this function gives the number of time units in each level of the #' factor - the actual number of data points in the `cf` instance per factor #' level may be different. Use [CFfactor_coverage()] to determine the actual #' number of data points or the coverage of data points relative to the factor #' level. #' #' @param t An instance of `CFTime`. #' @param f A factor or a list of factors derived from the #' parameter `t`. The factor or list thereof should generally be generated by #' the function [CFfactor()]. #' #' @returns If `f` is a factor, a numeric vector with a length equal to the #' number of levels in the factor, indicating the number of time units in each #' level of the factor. If `f` is a list of factors, a list with each element #' a numeric vector as above. #' @export #' #' @examples #' t <- CFtime("days since 2001-01-01", "365_day", 0:364) #' f <- CFfactor(t, "dekad") #' CFfactor_units(t, f) CFfactor_units <- function(t, f) { if (!inherits(t, "CFTime")) stop("First argument to `CFfactor_units()` must be an instance of the `CFTime` class", call. = FALSE) t$factor_units(f) } #' Coverage of time elements for each factor level #' #' This function calculates the number of time elements, or the relative #' coverage, in each level of a factor generated by [CFfactor()]. #' #' @param t An instance of [CFTime]. #' @param f factor or list. A factor or a list of factors derived from the #' parameter `t`. The factor or list thereof should generally be generated by #' the function [CFfactor()]. #' @param coverage "absolute" or "relative". #' @returns If `f` is a factor, a numeric vector with a length equal to the #' number of levels in the factor, indicating the number of units from the #' time series in `t` contained in each level of the factor when #' `coverage = "absolute"` or the proportion of units present relative to the #' maximum number when `coverage = "relative"`. If `f` is a list of factors, a #' list with each element a numeric vector as above. #' @export #' #' @examples #' t <- CFtime("days since 2001-01-01", "365_day", 0:364) #' f <- CFfactor(t, "dekad") #' CFfactor_coverage(t, f, "absolute") CFfactor_coverage <- function(t, f, coverage = "absolute") { if (!inherits(t, "CFTime")) stop("First argument to `CFfactor_coverage()` must be an instance of the `CFTime` class", call. = FALSE) # nocov t$factor_coverage(f, coverage) } # ============================================================================== # Regular functions #' Create a vector that represents CF timestamps #' #' This function generates a vector of character strings or `POSIXct`s that #' represent the date and time in a selectable combination for each offset. 
#' #' The character strings use the format `YYYY-MM-DDThh:mm:ss±hhmm`, depending on #' the `format` specifier. The date in the string is not necessarily compatible #' with `POSIXt` - in the `360_day` calendar `2017-02-30` is valid and #' `2017-03-31` is not. #' #' For the "proleptic_gregorian" calendar the output can also be generated as a #' vector of `POSIXct` values by specifying `asPOSIX = TRUE`. The same is #' possible for the "standard" and "gregorian" calendars but only if all #' timestamps fall on or after 1582-10-15. #' #' @param t The `CFTime` instance that contains the offsets to use. #' @param format character. A character string with either of the values "date" #' or "timestamp". If the argument is not specified, the format used is #' "timestamp" if there is time information, "date" otherwise. #' @param asPOSIX logical. If `TRUE`, for "standard", "gregorian" and #' "proleptic_gregorian" calendars the output is a vector of `POSIXct` - for #' other calendars an error will be thrown. Default value is `FALSE`. #' @seealso The [CFTime] `format()` method gives greater flexibility through #' the use of `strptime`-like format specifiers. #' @returns A character vector where each element represents a moment in time #' according to the `format` specifier. #' @export #' @examples #' t <- CFtime("hours since 2020-01-01", "standard", seq(0, 24, by = 0.25)) #' as_timestamp(t, "timestamp") #' #' t2 <- CFtime("days since 2002-01-21", "standard", 0:20) #' tail(as_timestamp(t2, asPOSIX = TRUE)) #' #' tail(as_timestamp(t2)) #' #' tail(as_timestamp(t2 + 1.5)) as_timestamp <- function(t, format = NULL, asPOSIX = FALSE) { if (!(inherits(t, "CFTime"))) stop("First argument to `as_timestamp()` must be an instance of the `CFTime` class", call. = FALSE) t$as_timestamp(format, asPOSIX) } #' Return the number of days in a month given a certain CF calendar #' #' Given a vector of dates as strings in ISO 8601 or UDUNITS format and a #' [CFTime] object, this function will return a vector of the same length as the #' dates, indicating the number of days in the month according to the calendar #' specification. If no vector of dates is supplied, the function will return an #' integer vector of length 12 with the number of days for each month of the #' calendar (disregarding the leap day for `standard` and `julian` calendars). #' #' @param t The `CFtime` instance to use. #' @param x character. An optional vector of dates as strings with format #' `YYYY-MM-DD`. Any time part will be silently ignored. #' #' @returns A vector indicating the number of days in each month for the vector #' of dates supplied as argument `x`. Invalidly specified dates will result in #' an `NA` value. If no dates are supplied, the number of days per month for #' the calendar as a vector of length 12. #' #' @export #' @seealso When working with factors generated by [CFfactor()], it is usually #' better to use [CFfactor_units()] as that will consider leap days for #' non-era factors. [CFfactor_units()] can also work with other time periods #' and calendar units, such as "hours per month", or "days per season".
#' @examples #' dates <- c("2021-11-27", "2021-12-10", "2022-01-14", "2022-02-18") #' t <- CFtime("days since 1850-01-01", "standard") #' month_days(t, dates) #' #' t <- CFtime("days since 1850-01-01", "360_day") #' month_days(t, dates) #' #' t <- CFtime("days since 1850-01-01", "all_leap") #' month_days(t, dates) #' #' month_days(t) month_days <- function(t, x = NULL) { stopifnot(inherits(t, "CFTime")) if (is.null(x)) return(t$cal$month_days()) else { if (!(is.character(x))) stop("Argument `x` must be a character vector of dates in 'YYYY-MM-DD' format") ymd <- t$cal$parse(x) if (anyNA(ymd$year)) warning("Some dates could not be parsed. Result contains `NA` values.", call. = FALSE) return(t$cal$month_days(ymd)) } } #' Parse series of timestamps in CF format to date-time elements #' #' This function will parse a vector of timestamps in ISO8601 or UDUNITS format #' into a data frame with columns for the elements of the timestamp: year, #' month, day, hour, minute, second, time zone. Those timestamps that could not #' be parsed or which represent an invalid date in the indicated `CFtime` #' instance will have `NA` values for the elements of the offending timestamp #' (which will generate a warning). #' #' The supported formats are the *broken timestamp* format from the UDUNITS #' library and ISO8601 *extended*, both with minor changes, as suggested by the #' CF Metadata Conventions. In general, the format is `YYYY-MM-DD hh:mm:ss.sss #' hh:mm`. The year can be from 1 to 4 digits and is interpreted literally, so #' `79-10-24` is the day Mount Vesuvius erupted and destroyed Pompeii, not #' `1979-10-24`. The year and month are mandatory, all other fields are #' optional. There are defaults for all missing values, following the UDUNITS #' and CF Metadata Conventions. Leading zeros can be omitted in the UDUNITS #' format, but not in the ISO8601 format. The optional fractional part can have #' as many digits as the precision calls for and will be applied to the smallest #' specified time unit. In the result of this function, if the fraction is #' associated with the minute or the hour, it is converted into a regular #' `hh:mm:ss.sss` format, i.e. any fraction in the result is always associated #' with the second, rounded down to milli-second accuracy. The separator between #' the date and the time can be a single whitespace character or a `T`. #' #' The time zone is optional and should have at least the hour or `Z` if #' present, the minute is optional. The time zone hour can have an optional #' sign. In the UDUNITS format the separator between the time and the time zone #' must be a single whitespace character, in ISO8601 there is no separation #' between the time and the timezone. Time zone names are not supported (as #' neither UDUNITS nor ISO8601 support them) and will cause parsing to fail when #' supplied, with one exception: the designator "UTC" is silently dropped (i.e. #' interpreted as "00:00"). #' #' Currently only the extended formats (with separators between the elements) #' are supported. The vector of timestamps may have any combination of ISO8601 #' and UDUNITS formats. #' #' @param t An instance of `CFTime` to use when parsing the date. #' @param x Vector of character strings representing timestamps in #' ISO8601 extended or UDUNITS broken format. #' @returns A `data.frame` with constituent elements of the parsed timestamps in #' numeric format. 
The columns are year, month, day, hour, minute, second #' (with an optional fraction), time zone (character string), and the #' corresponding offset value from the origin. Invalid input data will appear #' as `NA` - if this is the case, a warning message will be displayed - other #' missing information on input will use default values. #' @importFrom stats na.omit #' @export #' @examples #' t <- CFtime("days since 0001-01-01", "proleptic_gregorian") #' #' # This will have `NA`s on output and generate a warning #' timestamps <- c("2012-01-01T12:21:34Z", "12-1-23", "today", #' "2022-08-16T11:07:34.45-10", "2022-08-16 10.5+04") #' parse_timestamps(t, timestamps) parse_timestamps <- function(t, x) { stopifnot(is.character(x), inherits(t, "CFTime")) if (t$cal$unit > 4) stop("Parsing of timestamps on a 'month' or 'year' time unit is not supported.", call. = FALSE) out <- t$cal$parse(x) if (anyNA(out$year)) warning("Some dates could not be parsed. Result contains `NA` values.") # nocov if (length(unique(na.omit(out$tz))) > 1) warning("Timestamps have multiple time zones. Some or all may be different from the calendar time zone.") # nocov else if (out$tz[1] != t$cal$timezone) warning("Timestamps have time zone that is different from the calendar.") # nocov out } CFtime/R/CFtime.R 0000644 0001762 0000144 00000126211 14760146635 013133 0 ustar ligges users #' @title CFTime class #' #' @description This class manages the "time" dimension of netCDF files that #' follow the CF Metadata Conventions, and its productive use in R. #' #' The class has a field `cal` which holds a specific calendar from the #' allowed types (9 named calendars are currently supported). The calendar is #' also implemented as a (hidden) class which converts netCDF file encodings to #' timestamps as character strings, and vice-versa. Bounds information (the #' period of time over which a timestamp is valid) is used when defined in the #' netCDF file. #' #' Additionally, this class has functions to ease use of the netCDF "time" #' information when processing data from netCDF files. Filtering and indexing of #' time values is supported, as is the generation of factors. #' #' @export #' @references #' https://cfconventions.org/Data/cf-conventions/cf-conventions-1.12/cf-conventions.html#time-coordinate #' @docType class CFTime <- R6::R6Class("CFTime", public = list( #' @field cal The calendar of this `CFTime` instance, a descendant of the #' [CFCalendar] class. cal = NULL, #' @field offsets A numeric vector of offsets from the origin of the #' calendar. offsets = numeric(), #' @field resolution The average number of time units between offsets. resolution = NA_real_, #' @field bounds Optional, the bounds for the offsets. If not set, it is the #' logical value `FALSE`. If set, it is the logical value `TRUE` if the #' bounds are regular with respect to the regularly spaced offsets (e.g. #' successive bounds are contiguous and at mid-points between the #' offsets); otherwise a `matrix` with columns for `offsets` and low #' values in the first row, high values in the second row. Use #' `get_bounds()` to get bounds values when they are regularly spaced. bounds = FALSE, #' @description Create a new instance of this class. #' @param definition Character string of the units and origin of the #' calendar. #' @param calendar Character string of the calendar to use. Must be one of #' the values permitted by the CF Metadata Conventions. If `NULL`, the #' "standard" calendar will be used. #' @param offsets Numeric or character vector, optional. 
When numeric, a #' vector of offsets from the origin in the time series. When a character #' vector of length 2 or more, timestamps in ISO8601 or UDUNITS format. #' When a character string, a timestamp in ISO8601 or UDUNITS format and #' then a time series will be generated with a separation between steps #' equal to the unit of measure in the definition, inclusive of the #' definition timestamp. The unit of measure of the offsets is defined by #' the `definition` argument. initialize = function(definition, calendar, offsets = NULL) { if (is.null(calendar)) calendar <- "standard" # This may occur when "calendar" attribute is not defined in the NC file calendar <- tolower(calendar) self$cal <- switch(calendar, "standard" = CFCalendarStandard$new(calendar, definition), "gregorian" = CFCalendarStandard$new(calendar, definition), "proleptic_gregorian" = CFCalendarProleptic$new(calendar, definition), "tai" = CFCalendarTAI$new(calendar, definition), "utc" = CFCalendarUTC$new(calendar, definition), "julian" = CFCalendarJulian$new(calendar, definition), "360_day" = CFCalendar360$new(calendar, definition), "365_day" = CFCalendar365$new(calendar, definition), "noleap" = CFCalendar365$new(calendar, definition), "366_day" = CFCalendar366$new(calendar, definition), "all_leap" = CFCalendar366$new(calendar, definition), stop("Invalid calendar specification", call. = FALSE) ) if (is.null(offsets)) return() if (is.numeric(offsets)) { dim(offsets) <- NULL stopifnot(.validOffsets(offsets)) if (length(offsets) > 1L) { self$resolution <- (max(offsets) - min(offsets)) / (length(offsets) - 1L) if (any(diff(offsets) <= 0)) warning("Offsets not monotonically increasing.", call. = FALSE) } else { self$resolution <- NA_real_ } self$offsets <- as.numeric(offsets) } else if (is.character(offsets)) { time <- self$cal$parse(offsets) if (anyNA(time$year)) stop("Argument `offsets` contains invalid timestamps", call. = FALSE) # nocov if (length(offsets) == 1L) { self$offsets <- seq(0L, time$offset[1L]) self$resolution <- 1 } else { self$offsets <- time$offset self$resolution <- (max(self$offsets) - min(self$offsets)) / (length(self$offsets) - 1L) if (any(diff(self$offsets) <= 0)) warning("Offsets not monotonically increasing.", call. = FALSE) } } else if (!is.null(offsets)) stop("Invalid offsets for CFTime object", call. = FALSE) }, #' @description Print a summary of the `CFTime` object to the console. #' @param ... Ignored. #' @return `self` invisibly. print = function(...) { noff <- length(self$offsets) if (noff == 0L) { el <- " Elements: (no elements)\n" b <- " Bounds : (not set)\n" } else { d <- self$range() el <- if (noff > 1L) { sprintf(" Elements: [%s .. %s] (average of %f %s between %d elements)\n", d[1L], d[2L], self$resolution, CFt$units$name[self$cal$unit], noff) } else paste(" Elements:", d[1L], "\n") b <- if (is.logical(self$bounds)) { if (self$bounds) " Bounds : regular and consecutive\n" else " Bounds : not set\n" } else if (noff == 1L) " Bounds : set\n" else " Bounds : irregular\n" } cal <- capture.output(self$cal$print()) cat(paste(cal, collapse = "\n"), "\nTime series:\n", el, b, sep = "") invisible(self) }, #' @description This method returns the first and last timestamp of the time #' series as a vector. Note that the offsets do not have to be sorted. #' #' @param format Value of "date" or "timestamp". Optionally, a #' character string that specifies an alternate format. #' @param bounds Logical to indicate if the extremes from the bounds should #' be used, if set. Defaults to `FALSE`. 
#' #' @return Vector of two character strings that represent the starting and #' ending timestamps in the time series. If a `format` is supplied, that #' format will be used. Otherwise, if all of the timestamps in the time #' series have a time component of `00:00:00` the date of the timestamp is #' returned, otherwise the full timestamp (without any time zone #' information). range = function(format = "", bounds = FALSE) { if (length(self$offsets) == 0L) return(c(NA_character_, NA_character_)) if (!missing(format) && ((!is.character(format)) || length(format) != 1L)) stop("`format` argument, when present, must be a character string with formatting specifiers", call. = FALSE) # nocov if (!is.logical(bounds) || length(bounds) != 1L) stop("`bounds` argument, when present, must be a single logical value", call. = FALSE) # nocov if (bounds) { bnds <- self$get_bounds() if (is.null(bnds)) time <- self$cal$offsets2time(base::range(self$offsets)) else time <- self$cal$offsets2time(c(bnds[1L, 1L], bnds[2L, length(self$offsets)])) } else time <- self$cal$offsets2time(base::range(self$offsets)) .format_format(time, self$cal$timezone, format) }, #' @description This method generates a vector of character strings or #' `POSIXct`s that represent the date and time in a selectable combination #' for each offset. #' #' The character strings use the format `YYYY-MM-DDThh:mm:ss±hhmm`, #' depending on the `format` specifier. The date in the string is not #' necessarily compatible with `POSIXt` - in the `360_day` calendar #' `2017-02-30` is valid and `2017-03-31` is not. #' #' For the "proleptic_gregorian" calendar the output can also be generated #' as a vector of `POSIXct` values by specifying `asPOSIX = TRUE`. The #' same is possible for the "standard" and "gregorian" calendars but only #' if all timestamps fall on or after 1582-10-15. If `asPOSIX = TRUE` is #' specified while the calendar does not support it, an error will be #' generated. #' #' @param format character. A character string with either of the values #' "date" or "timestamp". If the argument is not specified, the format #' used is "timestamp" if there is time information, "date" otherwise. #' @param asPOSIX logical. If `TRUE`, for "standard", "gregorian" and #' "proleptic_gregorian" calendars the output is a vector of `POSIXct` - #' for other calendars an error will be thrown. Default value is `FALSE`. #' #' @return A character vector where each element represents a moment in #' time according to the `format` specifier. as_timestamp = function(format = NULL, asPOSIX = FALSE) { if (asPOSIX && !self$cal$POSIX_compatible(self$offsets)) stop("Cannot make a POSIX timestamp with this calendar.", call. = FALSE) if (length(self$offsets) == 0L) return() time <- self$cal$offsets2time(self$offsets) if (is.null(format)) format <- ifelse(self$cal$unit < 4L || .has_time(time), "timestamp", "date") else if (!(format %in% c("date", "timestamp"))) stop("Format specifier not recognized", call. = FALSE) # nocov if (asPOSIX) { if (format == "date") ISOdate(time$year, time$month, time$day, 0L) else ISOdatetime(time$year, time$month, time$day, time$hour, time$minute, time$second, "UTC") } else .format_format(time, self$cal$timezone, format) }, #' @description Format timestamps using a specific format string, using the #' specifiers defined for the [base::strptime()] function, with #' limitations. The only supported specifiers are `bBdeFhHImMpRSTYz%`. #' Modifiers `E` and `O` are silently ignored. 
Other specifiers, including #' their percent sign, are copied to the output as if they were adorning #' text. #' #' The formatting is largely oblivious to locale. The reason for this is #' that certain dates in certain calendars are not POSIX-compliant and the #' system functions necessary for locale information thus do not work #' consistently. The main exception to this is the (abbreviated) names of #' months (`bB`), which could be useful for pretty printing in the local #' language. For separators and other locale-specific adornments, use #' local knowledge instead of depending on system locale settings; e.g. #' specify `%m/%d/%Y` instead of `%D`. #' #' Week information, including weekday names, is not supported at all as a #' "week" is not defined for non-standard CF calendars and not generally #' useful for climate projection data. If you are working with observed #' data and want to get pretty week formats, use the [as_timestamp()] #' method to generate `POSIXct` timestamps (observed data generally uses a #' "standard" calendar) and then use the [base::format()] function which #' supports the full set of specifiers. #' #' @param format A character string with `strptime` format specifiers. If #' omitted, the most economical format will be used: a full timestamp when #' time information is available, a date otherwise. #' #' @return A vector of character strings with a properly formatted #' timestamp. Any format specifiers not recognized or supported will be #' returned verbatim. format = function(format) { if (length(self$offsets) == 0L) return(character(0L)) if (!requireNamespace("stringr", quietly = TRUE)) stop("package `stringr` is required - please install it first", call. = FALSE) # nocov if (missing(format)) format <- "" else if (!is.character(format) || length(format) != 1L) stop("`format` argument must be a character string with formatting specifiers", call. = FALSE) ts <- self$cal$offsets2time(self$offsets) .format_format(ts, self$cal$timezone, format) }, #' @description Find the index in the time series for each timestamp given #' in argument `x`. Values of `x` that are before the earliest value in #' the time series will be returned as `0`; values of `x` that are after #' the latest values in the time series will be returned as #' `.Machine$integer.max`. Alternatively, when `x` is a numeric vector of #' index values, return the valid indices of the same vector, with the #' side effect being the attribute "CFTime" associated with the result. #' #' Matching also returns index values for timestamps that fall between two #' elements of the time series - this can lead to surprising results when #' time series elements are positioned in the middle of an interval (as #' the CF Metadata Conventions instruct us to "reasonably assume"): a time #' series of days in January would be encoded in a netCDF file as #' `c("2024-01-01 12:00:00", "2024-01-02 12:00:00", "2024-01-03 12:00:00", ...)` #' so `x <- c("2024-01-01", "2024-01-02", "2024-01-03")` would #' result in `(NA, 1, 2)` (or `(NA, 1.5, 2.5)` with `method = "linear"`) #' because the date values in `x` are at midnight. This situation is #' easily avoided by ensuring that this `CFTime` instance has bounds set #' (use `bounds(y) <- TRUE` as a proximate solution if bounds are not #' stored in the netCDF file). See the Examples. #' #' If bounds are set, the indices are taken from those bounds. Returned #' indices may fall in between bounds if the latter are not contiguous, #' with the exception of the extreme values in `x`. 
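#'
#' A minimal sketch of this situation (the offsets and dates are illustrative
#' only, not run):
#' ```
#' # Daily data encoded at noon, as the CF Metadata Conventions suggest
#' t <- CFtime("days since 2024-01-01", "standard", 0:30 + 0.5)
#' t$indexOf(c("2024-01-02", "2024-01-03"))  # midnight dates fall between the noon elements
#' bounds(t) <- TRUE                         # give every element a day-wide interval
#' t$indexOf(c("2024-01-02", "2024-01-03"))  # now each date falls within a single element
#' ```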
#' #' Values of `x` that are not valid timestamps according to the calendar #' of this `CFTime` instance will be returned as `NA`. #' #' `x` can also be a numeric vector of index values, in which case the #' valid values in `x` are returned. If negative values are passed, the #' positive counterparts will be excluded and then the remainder returned. #' Positive and negative values may not be mixed. Using a numeric vector #' has the side effect that the result has the attribute "CFTime" #' describing the temporal dimension of the slice. If index values outside #' of the range of `self` are provided, an error will be thrown. #' #' @param x Vector of character, POSIXt or Date values to find indices for, #' or a numeric vector. #' @param method Single value of "constant" or "linear". If `"constant"` or #' when bounds are set on `self`, return the index value for each #' match. If `"linear"`, return the index value with any fractional value. #' #' @return A numeric vector giving indices into the "time" dimension of the #' dataset associated with `self` for the values of `x`. If there is at #' least 1 valid index, then attribute "CFTime" contains an instance of #' `CFTime` that describes the dimension of filtering the dataset #' associated with `self` with the result of this function, excluding any #' `NA`, `0` and `.Machine$integer.max` values. indexOf = function(x, method = "constant") { stopifnot(inherits(x, c("character", "POSIXt", "Date")) || is.numeric(x), method %in% c("constant", "linear")) if (is.numeric(x)) { if (!(all(x < 0, na.rm = TRUE) || all(x > 0, na.rm = TRUE))) stop("Cannot mix positive and negative index values", call. = FALSE) intv <- (1:length(self$offsets))[x] xoff <- self$offsets[x] } else { if (self$cal$unit > 4L) stop("Parsing of timestamps on a 'month' or 'year' time unit is not supported.", call. = FALSE) xoff <- self$cal$parse(as.character(x))$offset vals <- self$get_bounds() vals <- if (is.null(vals)) self$offsets else c(vals[1L, 1L], vals[2L, ]) intv <- stats::approx(vals, 1L:length(vals), xoff, method = method, yleft = 0, yright = .Machine$integer.max)$y intv[which(intv == length(vals))] <- .Machine$integer.max } valid <- which(!is.na(intv) & intv > 0 & intv < .Machine$integer.max) if (any(valid)) { t <- CFTime$new(self$cal$definition, self$cal$name, xoff[valid]) bnds <- self$get_bounds() if (!is.null(bnds)) t$set_bounds(bnds[, intv[valid], drop = FALSE]) attr(intv, "CFTime") <- t } intv }, #' @description Return bounds. #' #' @param format A string specifying a format for output, optional. #' @return An array with dims(2, length(offsets)) with values for the #' bounds. `NULL` if the bounds have not been set. get_bounds = function(format) { len <- length(self$offsets) if (len == 0L) return(NULL) bnds <- self$bounds if (is.logical(bnds)) { if (!bnds) return(NULL) b <- seq(from = self$offsets[1L] - self$resolution * 0.5, by = self$resolution, length.out = len + 1L) if (!missing(format)) { ts <- self$cal$offsets2time(b) b <- .format_format(ts, self$cal$timezone, format) } return(rbind(b[1L:len], b[2L:(len+1L)])) } # bnds is a matrix if (missing(format)) return(bnds) ts <- self$cal$offsets2time(as.vector(bnds)) b <- .format_format(ts, self$cal$timezone, format) dim(b) <- c(2L, len) b }, #' @description Set the bounds of the `CFTime` instance. #' #' @param value The bounds to set, in units of the offsets. Either a matrix #' `(2, length(self$offsets))` or a single logical value. #' @return `self` invisibly. 
set_bounds = function(value) { if (isFALSE(value)) self$bounds <- FALSE else if (isTRUE(value)) self$bounds <- TRUE else { off <- self$offsets len <- length(off) if (len == 0L) stop("Cannot set bounds when there are no offsets", call. = FALSE) if (is.matrix(value) && is.numeric(value)) { if (!all(dim(value) == c(2L, len))) stop("Replacement value has incorrect dimensions", call. = FALSE) } else stop("Replacement value must be a numeric matrix or a single logical value", call. = FALSE) if (!(all(value[2L,] >= off) && all(off >= value[1L,]))) stop("Values of the replacement value must surround the offset values", call. = FALSE) # Compress array to `TRUE`, if regular if (len > 1L && identical(value[1L,2L:len], value[2L,1L:(len-1L)]) && diff(range(diff(value[1L,]))) == 0) value <- TRUE self$bounds <- value invisible(self) } }, #' This method returns `TRUE` if the time series has uniformly distributed #' time steps between the extreme values, `FALSE` otherwise. First test #' without sorting; this should work for most data sets. If not, only then #' offsets are sorted. For most data sets that will work but for implied #' resolutions of month, season, year, etc based on a "days" or finer #' calendar unit this will fail due to the fact that those coarser units #' have a variable number of days per time step, in all calendars except for #' `360_day`. For now, an approximate solution is used that should work in #' all but the most non-conformal exotic arrangements. #' #' @return `TRUE` if all time steps are equidistant, `FALSE` otherwise, or #' `NA` if no offsets have been set. equidistant = function() { if (length(self$offsets) == 0L) return(NA) out <- all(diff(self$offsets) == self$resolution) if (!out) { doff <- diff(sort(self$offsets)) out <- all(doff == self$resolution) if (!out) { # Don't try to make sense of totally non-standard arrangements such as # calendar units "years" or "months" describing sub-daily time steps. # Also, 360_day calendar should be well-behaved so we don't want to get here. if (self$cal$unit > 4L || inherits(self$cal, "CFCalendar360")) return(FALSE) # Check if we have monthly or yearly data on a finer-scale calendar # This is all rather approximate but should be fine in most cases # This accommodates middle-of-the-time-period offsets as per the # CF Metadata Conventions # Please report problems at https://github.com/pvanlaake/CFtime/issues ddays <- range(doff) * CFt$units$per_day[self$cal$unit] return((ddays[1] >= 28 && ddays[2] <= 31) || # months (ddays[1] >= 8 && ddays[2] <= 11) || # dekads (ddays[1] >= 90 && ddays[2] <= 92) || # seasons, quarters (ddays[1] >= 365 && ddays[2] <= 366)) # years } } out }, #' @description Given a vector of character timestamps, return a logical #' vector of a length equal to the number of time steps in the time series #' with values `TRUE` for those time steps that fall between the two #' extreme values of the vector values, `FALSE` otherwise. #' #' @param extremes Character vector of timestamps that represent the #' time period of interest. The extreme values are selected. Badly #' formatted timestamps are silently dropped. #' @param closed Is the right side closed, i.e. included in the result? #' Default is `FALSE`. A specification of `c("2022-01-01", "2023-01-01)` #' will thus include all time steps that fall in the year 2022 when #' `closed = FALSE` but include `2023-01-01` if that exact value is #' present in the time series. 
#' @return A logical vector with a length equal to the number of time steps #' in `self` with values `TRUE` for those time steps that fall between the #' extreme values, `FALSE` otherwise. #' #' An attribute 'CFTime' will have the same definition as `self` but with #' offsets corresponding to the time steps falling between the two #' extremes. If there are no values between the extremes, the attribute is #' `NULL`. slice = function(extremes, closed = FALSE) { if (!is.character(extremes) || length(extremes) < 1L) stop("Second argument must be a character vector of at least one timestamp.", call. = FALSE) off <- self$offsets roff <- range(off) ext <- range(self$cal$parse(extremes)$offset, na.rm = TRUE) if (all(is.na(ext)) || ext[1L] > roff[2L] || ext[2L] < roff[1L]) out <- rep(FALSE, length(off)) else { if (ext[1L] == ext[2L]) closed <- TRUE out <- if (closed) off >= ext[1L] & off <= ext[2L] else off >= ext[1L] & off < ext[2L] if (any(out)) { t <- CFTime$new(self$cal$definition, self$cal$name, off[out]) bnds <- self$get_bounds() if (!is.null(bnds)) t$set_bounds(bnds[, out, drop = FALSE]) attr(out, "CFTime") <- t } else out <- rep(FALSE, length(off)) } out }, #' @description Can the time series be converted to POSIXt? #' @return `TRUE` if the calendar supports conversion to POSIXt, `FALSE` #' otherwise. POSIX_compatible = function() { self$cal$POSIX_compatible(self$offsets) }, #' @description Create a factor for a `CFTime` instance. #' #' When argument `breaks` is one of `"year", "season", "quarter", "month", #' "dekad", "day"`, a factor is generated as if by [CFfactor()]. When #' `breaks` is a vector of character timestamps a factor is produced with #' a level for every interval between timestamps. The last timestamp, #' therefore, is only used to close the interval started by the #' penultimate timestamp - use a distant timestamp (e.g. `range(x)[2]`) #' to ensure that all offsets to the end of the CFTime time series are #' included, if so desired. The last timestamp will become the upper bound #' in the `CFTime` instance that is returned as an attribute to this #' function so a sensible value for the last timestamp is advisable. #' #' This method works similarly to [base::cut.POSIXt()] but there are some #' differences in the arguments: for `breaks` the set of options is #' different and no preceding integer is allowed, `labels` are always #' assigned using values of `breaks`, and the interval is always #' left-closed. #' #' @param breaks A character string of a factor period (see [CFfactor()] for #' a description), or a character vector of timestamps that conform to the #' calendar of `x`, with a length of at least 2. Timestamps must be given #' in ISO8601 format, e.g. "2024-04-10 21:31:43". #' #' @return A factor with levels according to the `breaks` argument, with #' attributes 'period', 'era' and 'CFTime'. When `breaks` is a factor #' period, attribute 'period' has that value, otherwise it is '"day"'. #' When `breaks` is a character vector of timestamps, attribute 'CFTime' #' holds an instance of `CFTime` that has the same definition as `x`, but #' with (ordered) offsets generated from the `breaks`. Attribute 'era' #' is always -1. cut = function(breaks) { if (missing(breaks) || !is.character(breaks) || (len <- length(breaks)) < 1L) stop("Argument 'breaks' must be a character vector with at least 1 value", call.
= FALSE) # nocov if(len == 1L) { breaks <- sub("s$", "", tolower(breaks)) if (breaks %in% CFt$factor_periods) return(CFfactor(self, breaks)) # FIXME after CFfactor is done else stop("Invalid specification of 'breaks'", call. = FALSE) # nocov } # breaks is a character vector of multiple timestamps if (self$cal$unit > 4L) stop("Factorizing on a 'month' or 'year' time unit is not supported", call. = FALSE) # nocov time <- self$cal$parse(breaks) if (anyNA(time$year)) stop("Invalid specification of 'breaks'", call. = FALSE) # nocov sorted <- order(time$offset) ooff <- time$offset[sorted] intv <- findInterval(self$offsets, ooff) intv[which(intv %in% c(0L, len))] <- NA f <- factor(intv, labels = breaks[sorted][1L:(len-1L)]) # Attributes bnds <- rbind(ooff[1L:(len-1L)], ooff[2L:len]) off <- bnds[1L, ] + (bnds[2L, ] - bnds[1L, ]) * 0.5 t <- CFTime$new(self$cal$definition, self$cal$name, off) bounds(t) <- bnds attr(f, "period") <- "day" attr(f, "era") <- -1L attr(f, "CFTime") <- t f }, #' @description Generate a factor for the offsets, or a part thereof. This is #' specifically interesting for creating factors from the date part of the #' time series that aggregate the time series into longer time periods (such #' as month) that can then be used to process daily CF data sets using, for #' instance, `tapply()`. #' #' The factor will respect the calendar that the time series is built on. #' #' The factor will be generated in the order of the offsets. While typical #' CF-compliant data sources use ordered time series there is, however, no #' guarantee that the factor is ordered. For most processing with a factor #' the ordering is of no concern. #' #' If the `era` parameter is specified, either as a vector of years to #' include in the factor, or as a list of such vectors, the factor will only #' consider those values in the time series that fall within the list of #' years, inclusive of boundary values. Other values in the factor will be #' set to `NA`. The years need not be contiguous, within a single vector or #' among the list items, or in order. #' #' The following periods are supported by this method: #' #' \itemize{ #' \item `year`, the year of each offset is returned as "YYYY". #' \item `season`, the meteorological season of each offset is returned as #' "Sx", with x being 1-4, preceeded by "YYYY" if no `era` is #' specified. Note that December dates are labeled as belonging to the #' subsequent year, so the date "2020-12-01" yields "2021S1". This implies #' that for standard CMIP files having one or more full years of data the #' first season will have data for the first two months (January and #' February), while the final season will have only a single month of data #' (December). #' \item `quarter`, the calendar quarter of each offset is returned as "Qx", #' with x being 1-4, preceeded by "YYYY" if no `era` is specified. #' \item `month`, the month of each offset is returned as "01" to #' "12", preceeded by "YYYY-" if no `era` is specified. This is the default #' period. #' \item `dekad`, ten-day periods are returned as #' "Dxx", where xx runs from "01" to "36", preceeded by "YYYY" if no `era` #' is specified. Each month is subdivided in dekads as follows: 1- days 01 - #' 10; 2- days 11 - 20; 3- remainder of the month. #' \item `day`, the month and day of each offset are returned as "MM-DD", #' preceeded by "YYYY-" if no `era` is specified. #' } #' #' It is not possible to create a factor for a period that is shorter than #' the temporal resolution of the calendar. 
As an example, if the calendar #' has a monthly unit, a dekad or day factor cannot be created. #' #' Creating factors for other periods is not supported by this method. #' Factors based on the timestamp information and not dependent on the #' calendar can trivially be constructed from the output of the #' [as_timestamp()] function. #' #' For non-era factors the attribute 'CFTime' of the result contains a #' `CFTime` instance that is valid for the result of applying the factor to #' a resource that this instance is associated with. In other words, if #' `CFTime` instance 'At' describes the temporal dimension of resource 'A' #' and a factor 'Af' is generated from `Af <- At$factor()`, then #' `Bt <- attr(Af, "CFTime")` describes the temporal dimension of the result #' of, say, `B <- apply(A, 1:2, tapply, Af, FUN)`. The 'CFTime' attribute is #' `NULL` for era factors. #' #' @param period character. A character string with one of the values #' "year", "season", "quarter", "month" (the default), "dekad" or "day". #' @param era numeric or list, optional. Vector of years for which to #' construct the factor, or a list whose elements are each a vector of #' years. If `era` is not specified, the factor will use the entire time #' series for the factor. #' @return If `era` is a single vector or not specified, a factor with a #' length equal to the number of offsets in this instance. If `era` is a #' list, a list with the same number of elements and names as `era`, #' each containing a factor. Elements in the factor will be set to `NA` #' for time series values outside of the range of specified years. #' #' The factor, or factors in the list, have attributes 'period', 'era' #' and 'CFTime'. Attribute 'period' holds the value of the `period` #' argument. Attribute 'era' indicates the number of years that are #' included in the era, or -1 if no `era` is provided. Attribute #' 'CFTime' holds an instance of `CFTime` that has the same definition as #' this instance, but with offsets corresponding to the mid-point of #' non-era factor levels; if the `era` argument is specified, #' attribute 'CFTime' is `NULL`. factor = function(period = "month", era = NULL) { if (length(self$offsets) < 10L) stop("Cannot create a factor for very short time series", call. = FALSE) # nocov period <- tolower(period) if (!((length(period) == 1L) && (period %in% CFt$factor_periods))) stop("Period specifier must be a single value of a supported period", call. = FALSE) # nocov # No fine-grained period factors for coarse source data timestep <- CFt$units$seconds[self$cal$unit] * self$resolution; if ((period == "year") && (timestep > 86400 * 366) || (period %in% c("season", "quarter")) && (timestep > 86400 * 90) || # Somewhat arbitrary (period == "month") && (timestep > 86400 * 31) || (period == "dekad") && (timestep > 86400) || # Must be constructed from daily or finer data (period == "day") && (timestep > 86400)) # Must be no longer than a day stop("Cannot produce a short period factor from source data with long time interval", call. = FALSE) # nocov time <- self$cal$offsets2time(self$offsets) months <- c("01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12") if (is.null(era)) { # Create the factor for the specified period as well as bounds dates for a # new CFtime instance for the factor. Lower bounds for the factor level is # easy, upper bound of last level takes effort. 
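# As an illustration of the approach: for a "month" factor the lower bound of
# level "2024-03" is "2024-03-01", while the upper bound of the final level is
# the first day of the month (or year) following that level. The mid-points
# between successive bounds then become the offsets of the new CFTime instance
# that is attached to the factor as the "CFTime" attribute.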
switch(period, "year" = { out <- as.factor(sprintf("%04d", time$year)) l <- levels(out) dt <- c(paste0(l, "-01-01"), sprintf("%04d-01-01", as.integer(l[nlevels(out)]) + 1L)) }, "season" = { if (!requireNamespace("stringr")) stop("Must install package `stringr` to use this functionality.", call. = FALSE) # nocov out <- as.factor( ifelse(time$month == 12L, sprintf("%04dS1", time$year + 1L), sprintf("%04dS%d", time$year, time$month %/% 3L + 1L))) l <- levels(out) dt <- ifelse(substr(l, 6L, 6L) == "1", paste0(as.integer(substr(l, 1L, 4L)) - 1L, "-12-01"), stringr::str_replace_all(l, c("S2" = "-03-01", "S3" = "-06-01", "S4" = "-09-01"))) ll <- l[nlevels(out)] lp <- as.integer(substr(ll, 6L, 6L)) if (lp == 1L) dt <- c(dt, sprintf("%04d-03-01", as.integer(substr(ll, 1L, 4L)) + 1L)) else dt <- c(dt, sprintf("%s-%02d-01", substr(ll, 1L, 4L), lp * 3L)) }, "quarter" = { if (!requireNamespace("stringr")) stop("Must install package `stringr` to use this functionality.", call. = FALSE) # nocov out <- as.factor(sprintf("%04dQ%d", time$year, (time$month - 1L) %/% 3L + 1L)) l <- levels(out) dt <- stringr::str_replace_all(l, c("Q1" = "-01-01", "Q2" = "-04-01", "Q3" = "-07-01", "Q4" = "-10-01")) ll <- l[nlevels(out)] lp <- as.integer(substr(ll, 6L, 6L)) if (lp == 4L) dt <- c(dt, sprintf("%04d-01-01", as.integer(substr(ll, 1L, 4L)) + 1L)) else dt <- c(dt, sprintf("%s-%02d-01", substr(ll, 1L, 4L), lp * 3L + 1L)) }, "month" = { out <- as.factor(sprintf("%04d-%s", time$year, months[time$month])) l <- levels(out) dt <- paste0(l, "-01") ll <- l[nlevels(out)] lp <- as.integer(substr(ll, 6L, 7L)) if (lp == 12L) dt <- c(dt, sprintf("%04d-01-01", as.integer(substr(ll, 1L, 4L)) + 1L)) else dt <- c(dt, sprintf("%s-%02d-01", substr(ll, 1L, 4L), lp + 1L)) }, "dekad" = { out <- as.factor(sprintf("%04dD%02d", time$year, (time$month - 1L) * 3L + pmin.int((time$day - 1L) %/% 10L + 1L, 3L))) l <- levels(out) dk <- as.integer(substr(l, 6L, 7L)) - 1L dt <- sprintf("%s-%02d-%s", substr(l, 1L, 4L), dk %/% 3L + 1L, c("01", "11", "21")[dk %% 3L + 1L]) ll <- l[nlevels(out)] lp <- as.integer(substr(ll, 6L, 7L)) yr <- as.integer(substr(ll, 1L, 4L)) if (lp == 36L) dt <- c(dt, sprintf("%04d-01-01", yr + 1L)) else dt <- c(dt, sprintf("%04d-%02d-%s", yr, (lp + 1L) %/% 3L + 1L, c("01", "11", "21")[(lp + 1L) %% 3L + 1L])) }, "day" = { out <- as.factor(sprintf("%04d-%02d-%02d", time$year, time$month, time$day)) l <- levels(out) lp <- l[nlevels(out)] last <- self$cal$offsets2time(self$cal$parse(lp)$offset) dt <- c(l, sprintf("%04d-%02d-%02d", last$year, last$month, last$day)) } ) # Convert bounds dates to an array of offsets, find mid-points, create new CFTime instance off <- self$cal$parse(dt)$offset off[is.na(off)] <- 0 # This can happen only when the time series starts at or close to the origin, for seasons noff <- length(off) bnds <- rbind(off[1L:(noff - 1L)], off[2L:noff]) off <- bnds[1L,] + (bnds[2L,] - bnds[1L,]) * 0.5 new_cf <- CFTime$new(self$cal$definition, self$cal$name, off) bounds(new_cf) <- TRUE # Bind attributes to the factor attr(out, "era") <- -1L attr(out, "period") <- period attr(out, "CFTime") <- new_cf return(out) } # Era factor if (is.numeric(era)) ep <- list(era) else if ((is.list(era) && all(unlist(lapply(era, is.numeric))))) ep <- era else stop("When specified, the `era` parameter must be a numeric vector or a list thereof", call. 
= FALSE) out <- lapply(ep, function(years) { f <- switch(period, "year" = ifelse(time$year %in% years, sprintf("%04d", time$year), NA_character_), "season" = ifelse((time$month == 12L) & ((time$year + 1L) %in% years), "S1", ifelse((time$month < 12L) & (time$year %in% years), sprintf("S%d", time$month %/% 3L + 1L), NA_character_)), "quarter" = ifelse(time$year %in% years, sprintf("Q%d", (time$month - 1L) %/% 3L + 1L), NA_character_), "month" = ifelse(time$year %in% years, months[time$month], NA_character_), "dekad" = ifelse(time$year %in% years, sprintf("D%02d", (time$month - 1L) * 3L + pmin.int((time$day - 1L) %/% 10L + 1L, 3L)), NA_character_), "day" = ifelse(time$year %in% years, sprintf("%s-%02d", months[time$month], time$day), NA_character_) ) f <- as.factor(f) attr(f, "era") <- length(years) attr(f, "period") <- period attr(f, "CFTime") <- NULL f }) if (is.numeric(era)) out <- out[[1L]] else names(out) <- names(era) out }, #' @description Given a factor as produced by `CFTime$factor()`, this method #' will return a numeric vector with the number of time units in each #' level of the factor. #' #' The result of this method is useful to convert between absolute and #' relative values. Climate change anomalies, for instance, are usually #' computed by differencing average values between a future period and a #' baseline period. Going from average values back to absolute values for #' an aggregate period (which is typical for temperature and #' precipitation, among other variables) is easily done with the result of #' this method, without having to consider the specifics of the calendar #' of the data set. #' #' If the factor `f` is for an era (e.g. spanning multiple years and the #' levels do not indicate the specific year), then the result will #' indicate the number of time units of the period in a regular single #' year. In other words, for an era of 2041-2060 and a monthly factor on a #' standard calendar with a `days` unit, the result will be #' `c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)`. Leap days are thus #' only considered for the `366_day` and `all_leap` calendars. #' #' Note that this function gives the number of time units in each level of #' the factor - the actual number of data points in the time series per #' factor level may be different. Use [CFfactor_coverage()] to determine #' the actual number of data points or the coverage of data points #' relative to the factor level. #' #' @param f A factor or a list of factors derived from the method #' `CFTime$factor()`. #' @return If `f` is a factor, a numeric vector with a length equal to the #' number of levels in the factor, indicating the number of time units in #' each level of the factor. If `f` is a list of factors, a list with each #' element a numeric vector as above. factor_units = function(f) { if (is.list(f)) factors <- f else factors <- list(f) if (!(all(unlist(lapply(factors, function(x) is.factor(x) && is.numeric(attr(x, "era")) && attr(x, "period") %in% CFt$factor_periods))))) stop("Argument `f` must be a factor generated by the method `CFTime$factor()`", call. = FALSE) # nocov out <- lapply(factors, function(fac) .factor_units(fac, self$cal, CFt$units$per_day[self$cal$unit])) if (is.factor(f)) out <- out[[1L]] out }, #' @description Calculate the number of time elements, or the relative #' coverage, in each level of a factor generated by `CFTime$factor()`. #' #' @param f A factor or a list of factors derived from the method #' `CFTime$factor()`. #' @param coverage "absolute" or "relative". 
#' @return If `f` is a factor, a numeric vector with a length equal to the #' number of levels in the factor, indicating the number of units from the #' time series contained in each level of the factor when #' `coverage = "absolute"` or the proportion of units present relative to the #' maximum number when `coverage = "relative"`. If `f` is a list of factors, a #' list with each element a numeric vector as above. factor_coverage = function(f, coverage = "absolute") { if (is.list(f)) factors <- f else factors <- list(f) if (!(all(unlist(lapply(factors, function(x) is.factor(x) && is.numeric(attr(x, "era")) && attr(x, "period") %in% CFt$factor_periods))))) stop("Argument `f` must be a factor generated by the method `CFTime$factor()`", call. = FALSE) # nocov if (!(is.character(coverage) && coverage %in% c("absolute", "relative"))) stop("Argument `coverage` must be a character string with a value of 'absolute' or 'relative'", call. = FALSE) # nocov if (coverage == "relative") { out <- lapply(factors, function(fac) { res <- tabulate(fac) / .factor_units(fac, self$cal, CFt$units$per_day[self$cal$unit]) yrs <- attr(fac, "era") if (yrs > 0) res <- res / yrs return(res) }) } else { out <- lapply(factors, tabulate) } if (is.factor(f)) out <- out[[1L]] out } ), active = list( #' @field unit (read-only) The unit string of the calendar and time series. unit = function(value) { if (missing(value)) CFt$units$name[self$cal$unit] } ) ) CFtime/R/CFCalendar366.R 0000644 0001762 0000144 00000010306 14731346614 014137 0 ustar ligges users #' @title 366-day CF calendar #' #' @description This class represents a CF calendar of 366 days per year, having #' leap days in every year. This calendar is not compatible with the standard #' POSIXt calendar. #' #' This calendar supports dates before year 1 and includes the year 0. #' #' @aliases CFCalendar366 #' @docType class CFCalendar366 <- R6::R6Class("CFCalendar366", inherit = CFCalendar, private = list( # Rata Die, the number of days from the day before 0001-01-01 to # origin of this calendar. Used to convert offsets from the calendar origin # to the day before 0001-01-01 for arithmetic calculations. rd = 0L ), public = list( #' @description Create a new CF calendar of 366 days per year. #' @param nm The name of the calendar. This must be "366_day" or "all_leap". #' @param definition The string that defines the units and the origin, as #' per the CF Metadata Conventions. #' @return A new instance of this class. initialize = function(nm, definition) { super$initialize(nm, definition) private$rd <- self$date2offset(self$origin) }, #' @description Indicate which of the supplied dates are valid. #' @param ymd `data.frame` with dates parsed into their parts in columns #' `year`, `month` and `day`. Any other columns are disregarded. #' @return Logical vector with the same length as argument `ymd` has rows #' with `TRUE` for valid days and `FALSE` for invalid days, or `NA` where #' the row in argument `ymd` has `NA` values. valid_days = function(ymd) { ymd$year & ymd$month >= 1L & ymd$month <= 12L & ymd$day >= 1L & ymd$day <= c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month] }, #' @description Determine the number of days in the month of the calendar. #' @param ymd `data.frame`, optional, with dates parsed into their parts. #' @return A vector indicating the number of days in each month for the #' dates supplied as argument `ymd`. If no dates are supplied, the number #' of days per month for the calendar as a vector of length 12. 
month_days = function(ymd = NULL) { if (is.null(ymd)) return(c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)) res <- c(31L, 29L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L)[ymd$month] res[which(is.na(ymd$year))] <- NA res }, #' @description Indicate which years are leap years. #' @param yr Integer vector of years to test. #' @return Logical vector with the same length as argument `yr`. Since in #' this calendar all years have a leap day, all values will be `TRUE`, or #' `NA` where argument `yr` is `NA`. leap_year = function(yr) { res <- rep(TRUE, length(yr)) res[which(is.na(yr))] <- NA res }, #' @description Calculate difference in days between a `data.frame` of time #' parts and the origin. #' #' @param x `data.frame`. Dates to calculate the difference for. #' #' @return Integer vector of a length equal to the number of rows in #' argument `x` indicating the number of days between `x` and the `origin`, #' or `NA` for rows in `x` with `NA` values. date2offset = function(x) { corr <- ifelse(x$month <= 2L, 0L, -1L) 366L * (x$year - 1L) + (367L * x$month - 362L) %/% 12L + corr + x$day - private$rd }, #' @description Calculate date parts from day differences from the origin. This #' only deals with days as these are impacted by the calendar. #' Hour-minute-second timestamp parts are handled in [CFCalendar]. #' #' @param x Integer vector of days to add to the origin. #' @return A `data.frame` with columns 'year', 'month' and 'day' and as many #' rows as the length of vector `x`. offset2date = function(x) { d0 <- x - 1L + private$rd # d0 is offset relative to year 0, 0-based yr <- d0 %/% 366L + 1L # full years d1 <- d0 %% 366L # remaining days corr <- ifelse(d1 < 60L, 0L, 1L) # correct for days past February mon <- (12L * (d1 + corr) + 373L) %/% 367L day <- d1 - (367L * mon - 362L) %/% 12L + corr + 1L data.frame(year = yr, month = mon, day = day) } ) ) CFtime/R/CFtime-package.R 0000644 0001762 0000144 00000006071 14731346245 014522 0 ustar ligges users #' CFtime: working with CF Metadata Conventions "time" dimensions #' #' Support for all calendars as specified in the Climate and Forecast #' (CF) Metadata Conventions for climate and forecasting data. The CF Metadata #' Conventions is widely used for distributing files with climate observations #' or projections, including the Coupled Model Intercomparison Project (CMIP) #' data used by climate change scientists and the Intergovernmental Panel on #' Climate Change (IPCC). This package specifically allows the user to work #' with any of the CF-compliant calendars (many of which are not compliant with #' POSIXt). The CF time coordinate is formally defined in the #' [CF Metadata Conventions document](https://cfconventions.org/Data/cf-conventions/cf-conventions-1.12/cf-conventions.html#time-coordinate). #' #' The package can create a [CFTime] instance from scratch or, more commonly, it #' can use the dimension attributes and dimension variable values from a netCDF #' resource. The package does not actually do any of the reading and the user is #' free to use their netCDF package of preference. The recommended package to #' use (with any netCDF resources) is [ncdfCF](https://cran.r-project.org/package=ncdfCF). #' `ncdfCF` will automatically use this package to manage the "time" dimension #' of any netCDF resource. As with this package, it reads and interprets the #' attributes of the resource to apply the CF Metadata Conventions, supporting #' axes, auxiliary coordinate variables, coordinate reference systems, etc. 
#' Alternatively, for more basic netCDF reading and writing, the two main options #' are [RNetCDF](https://cran.r-project.org/package=RNetCDF) and #' [ncdf4](https://cran.r-project.org/package=ncdf4)). #' #' **Create, modify, inquire** #' * [CFtime()]: Create a [CFTime] instance #' * [`Properties`][properties] of the `CFTime` instance #' * [parse_timestamps()]: Parse a vector of character timestamps into `CFTime` elements #' * [`Compare`][CFtime-equivalent] two `CFTime` instances #' * [`Merge`][CFtime-merge] two `CFTime` instances or append additional time steps to a `CFTime` instance #' * [as_timestamp()] and [format()]: Generate a vector of character or `POSIXct` timestamps from a `CFTime` instance #' * [range()]: Timestamps of the two endpoints in the time series #' * [is_complete()]: Does the `CFTime` instance have a complete time series between endpoints? #' * [month_days()]: How many days are there in a month using the calendar of the `CFTime` instance? #' #' **Factors and coverage** #' * [CFfactor()] and [cut()]: Create factors for different time periods #' * [CFfactor_units()]: How many units of time are there in each factor level? #' * [CFfactor_coverage()]: How much data is available for each level of the factor? #' #' **Filtering and selection** #' * [slab()]: Logical vector of time steps between two extreme points. #' * [indexOf()]: Index values in the time series of given timestamps, possibly with #' fractional part for interpolation. #' @keywords internal #' @aliases CFtime-package "_PACKAGE" ## usethis namespace: start ## usethis namespace: end NULL CFtime/vignettes/ 0000755 0001762 0000144 00000000000 14761414434 013441 5 ustar ligges users CFtime/vignettes/Conformance.Rmd 0000644 0001762 0000144 00000014321 14741270365 016341 0 ustar ligges users --- title: "Conformance with the CF Metadata Conventions" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Conformance with the CF Metadata Conventions} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` `CFtime` is based on version 1.12 of the CF Metadata Conventions. The text for the *time coordinate* in the conventions may be consulted [here](https://cfconventions.org/Data/cf-conventions/cf-conventions-1.12/cf-conventions.html#time-coordinate). > The *time coordinate* is one of four coordinate types that receive "special treatment" in the conventions. The other three are longitude, latitude and the vertical. If you require convention-compliant support for any of these three other coordinate types, please consider using package [`ncdfCF`](https://cran.r-project.org/package=ncdfCF) which supports all three coordinate types and links with `CFtime` for support of the time coordinate. This document sets out how the `CFtime` package conforms to the CF Metadata Conventions, by section of the conventions. This information is mostly useful for developers and expert users. If you have issues reading a netCDF file that is due to conformance of package `CFtime` with the CF Metadata Conventions, please [open an issue on GitHub](https://github.com/pvanlaake/CFtime/issues). Please note that there are many netCDF files out there that are not claiming adherence to the CF Metadata Conventions but whose time coordinate can still be successfully handled by `CFtime`: the `netCDF` library itself provides the basic plumbing. ## 4.4. 
Time Coordinate A `CFTime` object is constructed from information that is contained in the `units` and `calendar` attributes for a time coordinate read out of a netCDF file. The package does not actually access the netCDF file, it only uses the information that was read out of the file by e.g. `ncdfCF`. Consequently, the `CFtime` package can also construct a `CFTime` object from suitable character strings. This package is agnostic to the orientation of the axes in any data variable that references the time coordinate. Consequently, the `standard_name` and `axis` attributes are not considered by this package (but the `ncdfCF` package handles both). Identification of a time coordinate is done by the `units` attribute, alone. ## 4.4.1. Time Coordinate Units The `CFtime` package fully supports the units `"second"`, `"minute"`, `"hour"` and `"day"`, including abbreviated and/or plural forms. Unit `"second"` is the SI second, a `"minute"` equals 60 seconds, an `"hour"` equals 3,600 seconds, and a `"day"` equals 86,400 seconds. This is exactly as expected, but refer to the `utc` calendar, below, for peculiarities of that calendar. The `units` `"month"` and `"year"` are accepted on input but not using their definition in UDUNITS. Instead, `"year"` is a calendar year, so either 360, 365 or 366 days depending on its value and the calendar. A `"month"` is similarly a calendar month. Use of either of these time units is discouraged by the CF Metadata Conventions. Other UDUNITS time units are not supported by this package. All variants of the glue word `"since"` are accepted, being `"after"`, `"from"`, `"ref"` and `"per"`. The *"reference datetime string"* should be formatted using the UDUNITS broken timestamp format or following [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) rules, but noting that datetimes valid in specific calendars other than Gregorian (such as `2023-02-30` in the `360_day` calendar) are acceptable as well. The UDUNITS "packed" format is not supported. Timezone information can only use `00`, `15`, `30` and `45` to indicate minutes; other minute offsets have never been used anywhere. A time zone value of `"UTC"` is accepted, as an extension to the conventions. Even though the conventions don't indicate it, the `tai` and `utc` calendars can carry no time zone indication as that does not exist for either of these calendars. ## 4.4.2. Calendar If a `calendar` attribute is not given, `"standard"` is assumed, being the default calendar as per the conventions. * `standard` (or the deprecated `gregorian`): Fully conformant, but leap seconds are never considered (see below). The combination of a *reference datetime* and other *datetimes* spanning the gap between 1582-10-05 and 1582-10-15, in either direction, is supported. * `proleptic_gregorian`: Fully conformant, but leap seconds are never considered (see below). * `julian`: Fully conformant, but, despite the suggestion in the conventions, leap seconds do not exist in this calendar and are thus never considered. * `utc`: Fully conformant. Leap seconds are always accounted so when a leap second is included, UTC time progresses like `23:59:58 ... 23:59:59 ... 23:59:60 ... 00:00:00 ... 00:00:01`. This also extends to minutes `23:59:00 ... 23:59:60 ... 00:00:59 ... 00:01:59`, always adding 60 seconds. Likewise for `hours` and `days`. Units `"year"` and `"month"` are not allowed, and neither is any time zone indication. * `tai`: Fully conformant. Units `"year"` and `"month"` are not allowed, and neither is any time zone indication. 
* `noleap` / `365_day`: Fully conformant. * `all_leap` / `366_day`: Fully conformant. * `360_day`: Fully conformant. * `none`: Not implemented. ## 4.4.3. Leap seconds The `utc` calendar fully supports leap seconds. The `julian` calendar has no concept of leap seconds so these are never possible or considered. Using a leap second in a `julian` calendar is an error. In the `standard` and `proleptic_gregorian` calendars only the variant without leap seconds is considered. The `units_metadata` attribute is not considered, so assumed to be `"leap seconds: unknown"`. The assumption here is that if second accuracy for a data producer is essential, then the entire tool chain from observation equipment, to processing, to file recording will have to be of known characteristics with regard to UTC time and leap seconds and thus the `utc` calendar would be used, rather than `standard` or `proleptic_gregorian` with a caveat communicated through the `units_metadata` attribute. ## 4.4.4. Time Coordinates with no Annual Cycle Not implemented. ## 4.4.5. Explicitly Defined Calendar Not implemented. CFtime/vignettes/Processing.Rmd 0000644 0001762 0000144 00000035550 14760342033 016223 0 ustar ligges users --- title: "Processing climate projection data" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Processing climate projection data} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup, include = FALSE} library(CFtime) ``` Climate projection data sets are produced in a variety of formats but all conform to the CF Metadata Conventions. NetCDF data files, in particular, are highly structured and relatively easy to process. That said, it is very important to maintain a proper processing workflow such that the small changes in the climate projections are maintained and revealed through analysis. In this document, the basic workflow with varying calendars is described. > In this vignette we are using the `ncdfCF` package as that provides the easiest interface to work with netCDF files. Package `CFtime` is integrated into `ncdfCF` which makes working with time dimensions in netCDF seamless. > Packages `RNetCDF` and `ncdf4` can work with `CFtime` as well but then the "intelligence" built into `ncdfCF` is not available, such as automatically identifying axes and data orientation. Other packages like `terra` and `stars` are not recommended because they do not provide access to the specifics of the time dimension of the data and do not consider any calendars other than "proleptic_gregorian". ## Processing climate projection data Individual files containing climate projections contain global, regional or local data, typically on a rectangular latitude-longitude grid, for a single parameter such as "near-surface temperature", and for a number of time steps. An analysis workflow then consists of a number of steps: - Download the appropriate data files for your desired combination of model, experiment, realization, geography, time range, parameter, ... (called a "data suite" henceforth). If your analysis involves multiple parameters (such as temperature and precipitation to estimate crop productivity), repeat the process for all parameters. If you want to make a multi-model ensemble to reduce model bias, repeat again for all desired model, experiment and realization combinations ("ensemble member"). You end up having one or more data suites to work with.
- Take all files in a data suite and extract the data. Process the data in the data suite. Since the data are (mostly) 3-dimensional arrays, this will involve binding the arrays on the right dimension and then do something like `apply(data, 1:2, tapply, f, fun)` (following the CF Metadata Conventions, dimensions 1 and 2 are "longitude" and "latitude", respectively; the third dimension is "time"; none of this is standardized though and deviations are out there; package `ncdfCF` can help with the `CFData$array()` method). Repeat for the data suite for each ensemble member. - Combine the above results as your workflow requires. Frequently this involves computing "anomalies": ratio the data for one or more future periods to a baseline period. Repeat for each ensemble member. - Construct the multi-model ensemble from the individual ensemble members. Apart from the first step of obtaining the data, the steps lend themselves well to automation. The catch, however, is in the factor `f` to use with `tapply()`. The different models (in your ensemble) use different calendars, meaning that different factors are required. The `CFtime` package can help out. The `CFTime$factor()` method produces a factor that respects the calendar of the data files. The method comes in two operating modes: - Plain vanilla mode produces a factor for a time period across the entire time series. The factor level includes the year. This would be useful to calculate mean temperature for every month in every year, for instance. - When one or more "eras" (periods of interest) are provided, the factor level no longer includes the year and can be used to calculate, for instance, the mean temperature per period of interest in the era (e.g. average March temperature in the era 2041-2060). ```{r} # Setting up library(ncdfCF) fn <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)[1] (ds <- ncdfCF::open_ncdf(fn)) # The T axis, with name "time" has a CFTime instance t <- ds[["time"]]$time() # Create monthly factors for a baseline era and early, mid and late 21st century eras baseline <- t$factor(era = 1991:2020) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) str(baseline) str(future) ``` Building on the examples above of opening a file, creating a `CFTime` instance and a suitable factor for one data suite, here daily rainfall, the actual processing of the data into precipitation anomalies for 3 periods relative to a baseline period could look like this: ```{r} # Get the data for the "pr" data variable from the netCDF data set. # The `CFData$array()` method ensures that data are in standard R orientation. # Converts units of kg m-2 s-1 to mm/day. pr <- ds[["pr"]]$data()$array() * 86400 # Get a global attribute from the file experiment <- ds$attribute("experiment_id") # Calculate the daily average precipitation per month for the baseline period # and the three future eras. pr_base <- apply(pr, 1:2, tapply, baseline, mean) # an array pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) # a list of arrays # Calculate the precipitation anomalies for the future eras against the baseline. # Working with daily averages per month so we can simply subtract and then multiply by days # per month for each of the factor levels using the CF calendar. 
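# For an era factor, `t$factor_units(f)` gives the number of days in each month of a regular single year of the calendar of the data set; multiplying the daily-average anomaly by these values yields the anomaly in mm per month.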
ano <- mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE) # Plot the results plot(1:12, ano$early[,1,1], type = "o", col = "blue", ylim = c(-50, 40), xlim = c(1, 12), main = paste0("Hamilton, New Zealand\nExperiment: ", experiment), xlab = "month", ylab = "Precipitation anomaly (mm)") lines(1:12, ano$mid[,1,1], type = "o", col = "green") lines(1:12, ano$late[,1,1], type = "o", col = "red") ``` Looks like Hadley will be needing rubber boots in spring and autumn back home! The interesting feature, working from opening the netCDF file down to plotting, is that the specifics of the CF calendar that the data suite uses do not have to be considered anywhere in the processing workflow: the `CFtime` package provides the functionality. Data suites using another CF calendar are processed exactly the same. ## Combining data from different models with different calendars Different climate projection data sets can use different calendars. It is absolutely essential to respect the calendar of the different data sets because the underlying solar and atmospheric physics are based on those calendars as well. In a typical situation, a researcher would construct a multi-model ensemble to remove or reduce the bias in any given model. The data sets composing the ensemble might well use different calendars. The correct way of constructing an ensemble is to perform the desired analysis on every ensemble member individually and to combine them only in the final step and to then perform any ensemble operations such as computing confidence intervals. The design of the `CFtime` package makes it easy to do this, through its heavy use of lists. Building on the previous example, let's make a multi-model ensemble of 2 models (not much of an ensemble but such are the limitations of including data with packages - the example easily extends to a larger set of ensemble members). ```{r} # Get the list of files that make up the ensemble members, here: # GFDL ESM4 and MRI ESM2 models for experiment SSP2-4.5, precipitation, CMIP6 2015-01-01 to 2099-12-31 lf <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE) # Loop over the files individually # ano is here a list with each element holding the results for a single model ano <- lapply(lf, function(fn) { ds <- ncdfCF::open_ncdf(fn) t <- ds[["time"]]$time() pr <- ds[["pr"]]$data()$array() * 86400 baseline <- t$factor(era = 1991:2020) pr_base <- apply(pr, 1:2, tapply, baseline, mean) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE) }) # Era names eras <- c("early", "mid", "late") dim(eras) <- 3 # Build the ensemble for each era # For each era, grab the data for each of the ensemble members, simplify to an array # and take the mean per row (months, in this case) ensemble <- apply(eras, 1, function(e) { rowMeans(sapply(ano, function(a) a[[e]], simplify = TRUE))}) colnames(ensemble) <- eras rownames(ensemble) <- rownames(ano[[1]][[1]]) ensemble ``` Here we simply compute the average of the monthly precipitation anomaly over the ensemble members. In a more typical scenario, you would use the values from the individual models and to apply a more suitable analysis, such as calculating the confidence interval or model agreement. 
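As a minimal sketch of such an analysis - assuming the `ano` list and `eras` vector created above - the spread among ensemble members could be summarized per era as below. The `spread` object and the use of `sd()` are illustrative choices only, not part of the package; with just two members the result is not meaningful, but the same code works unchanged for larger ensembles.

```{r eval = FALSE}
# Per-month standard deviation across ensemble members, for each era.
spread <- apply(eras, 1, function(e) {
  # Matrix of months x ensemble members for this era
  m <- sapply(ano, function(a) a[[e]], simplify = TRUE)
  # Standard deviation per month across the members
  apply(m, 1, sd)
})
colnames(spread) <- eras
rownames(spread) <- rownames(ano[[1]][[1]])
spread
```

With more ensemble members, `quantile()` could be used in the same way to derive, for instance, a per-month confidence band.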
One significant advantage of this processing workflow is that it is easily parallelized: the bulk of the work goes into computing the anomalies, `ano`, and this is [embarrassingly parallel](https://en.wikipedia.org/wiki/Embarrassingly_parallel) because they read their own data and produce independent outputs. Use [package future](https://cran.r-project.org/package=future) or something similar to easily make the code run on all available CPU cores. ## Working with multiple files in a single data suite Due to the large size of typical climate projection data files, it is common to have a data suite that is contained in multiple files. A case in point is the CORDEX data set which breaks up the experiment period of 2006 - 2100 into 19 files of 5 years each, with each file covering a single parameter (temperature, precipitation, etc) over an entire domain (such as Europe, South Asia, Central America and the Caribbean, etc). The CFtime package can streamline processing of such multi-file data suites as well. Assuming that you have your CORDEX files in a directory on disk, organized by domain and other properties such as the variable, GCM/RCM combination, experiment, etc, the process of preparing the files for processing could be encoded in a function as below. The argument `fn` is a list of file names to process, and `var` is the variable contained in the files. (There are no checks on argument sanity here, which should really be included. This function only makes sense for a single [domain, GCM/RCM, experiment, variable] combination. Also be aware of data size, CORDEX files are huge and stitching all domain data together will easily exhaust available memory and it may thus lead to very large swap files and very poor performance - use the `ncdfCF::CFVariable$subset()` method to read spatial or temporal chunks of data to avoid such problems.) ```{r eval = FALSE} library(ncdfCF) library(abind) prepare_CORDEX <- function(fn, var, aoi) { data <- vector("list", length(fn)) for (i in 1:length(fn)) { ds <- ncdfCF::open_ncdf(fn[i]) if (i == 1) { # Get a CFTime instance from the first file t <- ds[["time"]]$time() } else { # Add offsets from the file and add to the CFTime instance t <- t + ds[["time"]]$time()$offsets } # Put the subsetted data array in the list data[[i]] <- ds[[var]]$subset(aoi = aoi)$array() } # Create a list for output with the CFTime instance and # the data bound in a single 3-dimensional array list(CFTime = t, data = abind(data, along = 3)) } ``` Calling this function like `prepare_CORDEX(list.files(path = "~/CC/CORDEX/CAM", pattern = "^pr.*\\.nc$", full.names = TRUE), "pr", ncdfCF::aoi(0, 20, 30, 50))` will yield a list of netCDF files with precipitation data, with the resulting `CFTime` instance describing the full temporal extent covered by the data files, as well as the data bound on the temporal dimension, ready for further processing. When working like this it is imperative that the offsets and the data arrays are added to their final structures *in exactly the same order*. It is not necessary that the offsets (and the data) themselves are in order, but the correspondence between offsets and data needs to be maintained. (`list.files()` produces a list in alphabetical order by default, which for most climate projection files produces offsets in chronological order.) ## Acknowledgements The results presented contain modified data from Copernicus Climate Change Service information, 2023-2025. 
Neither the European Commission nor ECMWF is responsible for any use that may be made of the Copernicus information or data it contains. We acknowledge the World Climate Research Programme, which, through its Working Group on Coupled Modelling, coordinated and promoted CMIP6. We thank the climate modeling groups for producing and making available their model output, the Earth System Grid Federation (ESGF) for archiving the data and providing access, and the multiple funding agencies who support CMIP6 and ESGF. The two data sets used as examples in this vignette carry the following license statements: - **pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701.nc:** CMIP6 model data produced by NOAA-GFDL is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Consult https://pcmdi.llnl.gov/CMIP6/TermsOfUse/ for terms of use governing CMIP6 output, including citation requirements and proper acknowledgment. Further information about this data, including some limitations, can be found via the further_info_url (recorded as a global attribute in this file). The data producers and data providers make no warranty, either express or implied, including, but not limited to, warranties of merchantability and fitness for a particular purpose. All liabilities arising from the supply of the information (including any liability arising in negligence) are excluded to the fullest extent permitted by law. - **pr_day_MRI-ESM2-0_ssp245_r1i1p1f1_gn_20150101-20991231_v20190603.nc:** CMIP6 model data produced by MRI is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Consult https://pcmdi.llnl.gov/CMIP6/TermsOfUse/ for terms of use governing CMIP6 output, including citation requirements and proper acknowledgment. Further information about this data, including some limitations, can be found via the further_info_url (recorded as a global attribute in this file). The data producers and data providers make no warranty, either express or implied, including, but not limited to, warranties of merchantability and fitness for a particular purpose. All liabilities arising from the supply of the information (including any liability arising in negligence) are excluded to the fullest extent permitted by law. CFtime/vignettes/CFtime.Rmd 0000644 0001762 0000144 00000053016 14761411610 015252 0 ustar ligges users --- title: "Working with CFtime" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Working with CFtime} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup, include = FALSE} library(CFtime) library(ncdfCF) ``` ## Climate change models and calendars Around the world, many climate change models are being developed (100+) under the umbrella of the [World Climate Research Programme](https://www.wcrp-climate.org) to assess the rate of climate change. Published data is generally publicly available to download for research and other (non-commercial) purposes through partner organizations in the Earth Systems Grid Federation. The data are all formatted to comply with the [CF Metadata Conventions](http://cfconventions.org), a set of standards to support standardization among research groups and published data sets. 
These conventions greatly facilitate use and analysis of the climate projections because standard processing work flows (should) work across the various data sets. On the flip side, the CF Metadata Conventions needs to cater to a wide range of modeling requirements and that means that some of the areas covered by the standards are more complex than might be assumed. One of those areas is the temporal dimension of the data sets. The CF Metadata Conventions supports no fewer than 11 different calendar definitions that, upon analysis, fall into 8 distinct calendars (from the perspective of computation of climate projections): - `standard` (or `gregorian`): The Gregorian calendar that is in common use in many countries around the world, adopted by edict of Pope Gregory XIII in 1582 and in effect from 15 October of that year. The earliest valid time in this calendar is 0001-01-01 00:00:00 (1 January of year 1) as year 0 does not exist and the CF Metadata Conventions require the year to be positive, but noting that a Julian calendar is used in periods before the Gregorian calendar was introduced. - `proleptic_gregorian`: This is the Gregorian calendar with validity extended to periods prior to `1582-10-15`, including a year 0 and negative years. This calendar is used by most operating systems and is what R uses. - `tai`: International Atomic Time, a global standard for linear time: it counts seconds since its start at 1958-01-01 00:00:00. For presentation it uses the Gregorian calendar. Timestamps prior to its start are not allowed. - `utc`: Coordinated Universal Time, the standard for civil timekeeping all over the world. It is based on International Atomic Time but it uses occasional leap seconds to remain synchronous with the Earth's rotation; at the end of 2024 it is 37 seconds behind `tai`. It uses the Gregorian calendar with a start at 1972-01-01 00:00:00; earlier timestamps are not allowed. Future timestamps are also not allowed because the insertion of leap seconds is unpredictable. Most computer clocks use UTC. - `julian`: Adopted in the year 45 BCE, every fourth year is a leap year. Originally, the Julian calendar did not have a monotonically increasing year assigned to it and there are indeed several Julian calendars in use around the world today with different years assigned to them. Common interpretation is currently that the year is the same as that of the Gregorian calendar. The Julian calendar is currently 13 days behind the Gregorian calendar. As with the standard calendar, the earliest valid time in this calendar is 0001-01-01 00:00:00. - `365_day` or `noleap`: "Model time" in which no years have a leap day. Negative years are allowed and year 0 exists. - `366_day` or `all_leap`: "Model time" in which all years have a leap day. Negative years are allowed and year 0 exists. - `360_day`: "Model time" in which every year has 12 months of 30 days each. Negative years are allowed and year 0 exists. The latter three calendars of model time are specific to the CF Metadata Conventions to reduce computational complexities of working with dates. None of the 8 calendars are compliant with the standard `POSIXt` date/time facilities in `R` and using standard date/time functions would quickly lead to problems. See the section on "CFtime and POSIXt", below, for a detailed description of the discrepancies between the CF calendars and POSIXt. In the below code snippet, the date of `1949-12-01` is the *origin* from which other dates are calculated.
When adding 43,289 days to this origin for a data set that uses the `360_day` calendar, that should yield a date some 120 years after the origin: ```{r} # POSIXt calculations on a standard calendar - INCORRECT as.Date("1949-12-01") + 43289 # CFtime calculation on a "360_day" calendar - CORRECT # See below examples for details on the two functions as_timestamp(CFtime("days since 1949-12-01", "360_day", 43289)) ``` Using standard `POSIXt` calculations gives a result that is about 21 months off from the correct date - obviously an undesirable situation. This example is far from artificial: `1949-12-01` is the origin for all CORDEX data, covering the period 1950 - 2005 for historical experiments and the period 2006 - 2100 for RCP experiments (with some deviation between data sets), and several models used in the CORDEX set use the `360_day` calendar. The `365_day` or `noleap` calendar deviates by about 1 day every 4 years (disregarding centurial years), or about 24 days in a century. The `366_day` or `all_leap` calendar deviates by about 3 days every 4 years, or about 76 days in a century. The `CFtime` package deals with the complexity of the different calendars allowed by the CF Metadata Conventions. It properly formats dates and times (even oddball dates like `2070-02-30`) and it can generate calendar-aware factors for further processing of the data. ##### Time zones The character of CF time series - a number of numerical offsets from a base date - implies that there should only be a single time zone associated with the time series, and then only for the `standard` and `proleptic_gregorian` calendars. For the other calendars a time zone can be set but it will have no effect. Daylight savings time information is never considered by `CFtime` so the user should take care to avoid entering times with DST. The time zone offset from UTC is stored in the `CFTime` instance and can be retrieved with the `timezone()` function. If a vector of character timestamps with time zone information is parsed with the `parse_timestamps()` function and the time zones are found to be different from the `CFTime` time zone, a warning message is generated but the timestamp is interpreted as being in the `CFTime` time zone. No correction of timestamp to `CFTime` time zone is performed. The concept of time zones does not apply to the `utc` and `tai` calendars as they represent universal time, i.e. the indicated time is valid all over the globe. Timestamps passed to these calendars should not have a time zone indicated, but if there are, anything other than a 0 offset will generate an error. ## Using CFtime to deal with calendars Data sets that are compliant with the CF Metadata Conventions always include a *origin*, a specific point in time in reference to a specified *calendar*, from which other points in time are calculated by adding a specified *offset* of a certain *unit*. This approach is encapsulated in the `CFtime` package by the R6 class `CFTime`. ```{r} # Create a CFTime object from a definition string, a calendar and some offsets (t <- CFtime("days since 1949-12-01", "360_day", 19830:90029)) ``` The `CFtime()` function takes a description (which is actually a unit - "days" - in reference to an origin - "1949-12-01"), a calendar description, and a vector of *offsets* from that origin. Once a `CFTime` instance is created its origin and calendar cannot be changed anymore. Offsets may be added. In practice, these parameters will be taken from the data set of interest. 
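For example, appending offsets to an existing instance keeps the original origin and calendar. The sketch below assumes the `t` object created above; the name `t2` and the offset values `90030:90059` are chosen purely for illustration.

```{r eval = FALSE}
# Extend the time series by another 30 days; the origin and the "360_day"
# calendar of `t` are retained, only the offsets are appended.
t2 <- t + 90030:90059
t2$range()
```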
CF Metadata Conventions require data sets to be in the netCDF format, with all metadata describing the data set included in a single file, including the mandatory "Conventions" global attribute which should have a string identifying the version of the CF Metadata Conventions that this file adheres to (among possible others). Not surprisingly, all the pieces of interest are contained in the "time" dimension of the file. The process then becomes as follows, for a CMIP6 file of daily precipitation: ```{r} # Opening a data file that is included with the package. # Usually you would `list.files()` on a directory of your choice. fn <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)[1] (ds <- ncdfCF::open_ncdf(fn)) # "Conventions" global attribute must have a string like "CF-1.*" for this package to work reliably # Look at the "time" axis (time <- ds[["time"]]) # Get the CFTime instance from the "time" axis (t <- time$time()) ``` You can see from the global attribute "Conventions" that the file adheres to the CF Metadata Conventions, among others. According to the CF conventions, `units` and `calendar` are required attributes of the "time" dimension in the netCDF file. The above example (and others in this vignette) uses the `ncdfCF` package. If you are using the `RNetCDF` or `ncdf4` package, checking for CF conventions and then creating a `CFTime` instance goes like this: ```{r, eval = FALSE} library(RNetCDF) nc <- open.nc(fn) att.get.nc(nc, -1, "Conventions") t <- CFtime(att.get.nc(nc, "time", "units"), att.get.nc(nc, "time", "calendar"), var.get.nc(nc, "time")) library(ncdf4) nc <- nc_open(fn) ncatt_get(nc, 0, "Conventions") t <- CFtime(nc$dim$time$units, nc$dim$time$calendar, nc$dim$time$vals) ``` The character representations of the time series can be easily generated: ```{r} dates <- t$as_timestamp(format = "date") dates[1:10] ``` ...as well as the range of the time series: ```{r} t$range() ``` Note that in this latter case, if any of the timestamps in the time series have a time that is other than `00:00:00` then the time of the extremes of the time series is also displayed. This is a common occurrence because the CF Metadata Conventions prescribe that the middle of the time period (month, day, etc) is recorded, which for months with 31 days would be something like `2005-01-15T12:00:00`. ## Supporting processing of climate projection data When working with high resolution climate projection data, typically at a "day" resolution, one of the processing steps would be to aggregate the data to some lower resolution such as a dekad (10-day period), a month or a meteorological season, and then compute a derivative value such as the dekadal sum of precipitation, monthly minimum/maximum daily temperature, or seasonal average daily short-wave irradiance. It is also possible to create factors for multiple "eras" in one go. This greatly reduces programming effort if you want to calculate anomalies over multiple future periods. A complete example is provided in the vignette ["Processing climate projection data"](Processing.html).
It is easy to generate the factors that you need once you have a `CFTime` instance prepared: ```{r} # Create a dekad factor for the whole `t` time series that was created above f_k <- t$factor("dekad") str(f_k) # Create monthly factors for a baseline era and early, mid and late 21st century eras baseline <- t$factor(era = 1991:2020) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) str(future) ``` For the "era" version, there are two interesting things to note here: - The eras do not have to coincide with the boundaries of the time series. In the example above, the time series starts in 2015, while the baseline era is from 1991. Obviously, the number of time steps from the time series that then fall within this era will then be reduced. - The factor is always of the same length as the time series, with `NA` values where the time series values are not falling in the era. This ensures that the factor is compatible with the data set which the time series describes, such that functions like `tapply()` will not throw an error. There are six periods defined for factoring: - `year`, to summarize data to yearly timescales - `season`, the meteorological seasons. Note that the month of December will be added to the months of January and February of the following year, so the date "2020-12-01" yields the factor value "2021S1". - `quarter`, the standard quarters of the year. - `month`, monthly summaries, the default period. - `dekad`, 10-day period. Each month is subdivided in dekads as follows: (1) days 01 - 10; (2) days 11 - 20; (3) remainder of the month. - `day`, to summarize sub-daily data. ##### New "time" dimension A `CFTime` instance describes the "time" dimension of an associated data set. When you process that dimension of the data set using `CFTime$factor()` or another method to filter or otherwise subset the "time" dimension, the resulting data set will have a different "time" dimension. To associate a proper `CFTime` instance with your processing result, the methods in this package return that `CFTime` instance as an attribute: ```{r} (new_time <- attr(f_k, "CFTime")) ``` In the vignette ["Processing climate projection data"](Processing.html) is a fully worked out example of this. ##### Incomplete time series You can test if your time series is complete with the function `is_complete()`. A time series is considered complete if the time steps between the two extreme values are equally spaced. There is a "fuzzy" assessment of completeness for time series with a datum unit of "days" or smaller where the time steps are months or years apart - these have different lengths in days in different months or years (e.g. a leap year). If your time series is incomplete, for instance because it has missing time steps, you should recognize that in your further processing. As an example, you might want to filter out months that have fewer than 90% of daily data from further processing or apply weights based on the actual coverage. ```{r} # Is the time series complete? is_complete(t) # How many time units fit in a factor level? t$factor_units(baseline) # What's the absolute and relative coverage of our time series t$factor_coverage(baseline, "absolute") t$factor_coverage(baseline, "relative") ``` The time series is complete but coverage of the baseline era is only 20%! Recall that the time series starts in 2015 while the baseline period in the factor is for `1991:2020` so that's only 6 years of time series data out of 30 years of the baseline factor. 
An artificial example of missing data: ```{r} # 4 years of data on a `365_day` calendar, keep 80% of values n <- 365 * 4 cov <- 0.8 offsets <- sample(0:(n-1), n * cov) (t <- CFtime("days since 2020-01-01", "365_day", offsets)) # Note that there are about 1.25 days between observations mon <- t$factor("month") t$factor_coverage(mon, "absolute") t$factor_coverage(mon, "relative") ``` Keep in mind, though, that there are data sets where the time unit is lower than the intended resolution of the data. Since the CF conventions recommend that the coarsest time unit is "day", many files with monthly data sets have a definition like `days since 2016-01-01` with offset values for the middle of the month like `15, 44, 74, 104, ...`. Even in these scenarios you can verify that your data set is complete with the function `CFcomplete()`. ## CFtime and POSIXt The CF Metadata Conventions supports 11 different calendars. None of these are fully compatible with POSIXt, the basis for timekeeping on virtually all computers. The reason for this is that POSIXt does not consider leap seconds (just like the `tai` calendar) but computer clocks are periodically synchronized using Network Time Protocol servers that report UTC time. The problem is easily demonstrated: ```{r} # 1970-01-01 is the origin of POSIXt difftime(as.POSIXct("2024-01-01"), as.POSIXct("1970-01-01"), units = "sec") # Leap seconds in UTC .leap.seconds ``` `difftime()` is off by 27 seconds, the number of leap seconds in UTC since their introduction in 1972. Your computer may have the correct time based on UTC, but calculations over periods that include leap seconds are always off by a number of seconds. ##### Duh! If 27 seconds is of no concern to you or your application - perhaps your data has a daily resolution - then you can safely forget about the leap seconds in several of the calendars, in particular `standard` (for periods after 1582-10-15), `proleptic_gregorian` and `tai`. The `utc` calendar does account for leap seconds so consider if you should use that - this is the only calendar that considers leap seconds in calculation. These calendars support the generation of timestamps in POSIXct with the `as_timestamp()` function but note the potential for a discrepancy due to the presence of leap seconds. ##### When seconds count If second accuracy is of concern, then you should carefully consider the time keeping in the source of your data and use a matching calendar. The `utc` calendar is a sensible option if your equipment synchronizes time with an NTP server or a computer that does so. Even then you should ensure that time is accurate after a new leap second is introduced. ##### Bigger problems The other calendars have discrepancies with POSIXt that are much larger, namely one or more days. These calendars do not support POSIXct timestamps and an error will be thrown if you try. If you really want the timestamps in POSIXct then you can generate the timestamps as character strings using this package, and then convert to a `POSIXct` or `Date` using the available R tools. Converting time series using these incompatible calendars to `POSIXct` or `Date` is likely to produce problems. This is most pronounced for the `360_day` calendar: ```{r} # Days in January and February t <- CFtime("days since 2023-01-01", "360_day", 0:59) ts_days <- t$as_timestamp("date") as.Date(ts_days) ``` 31 January is missing from the vector of `Date`s because the `360_day` calendar does not include it and 29 and 30 February are `NA`s because POSIXt rejects them. 
This will produce problems later on when processing your data. The general advice is therefore: **do not convert CFTime objects to Date objects** unless you are sure that the `CFTime` object uses a POSIXt-compatible calendar. The degree of incompatibility for the various calendars is as follows: - `standard`: Only valid for periods after 1582-10-15. The preceding period uses the Julian calendar. - `julian`: Every fourth year is a leap year. Dates like `2100-02-29` and `2200-02-29` are valid. - `365_day` or `noleap`: No leap year exists. `2020-02-29` does not occur. - `366_day` or `all_leap`: All years are leap years. - `360_day`: All months have 30 days in every year. This means that 31 January, March, May, July, August, October and December never occur, while 29 and 30 February occur in every year. ##### So how do I compare climate projection data with different calendars? One reason to convert the time dimension from different climate projection data sets is to be able to compare the data from different models and produce a multi-model ensemble. The correct procedure to do this is to first calculate **for each data set individually** the property of interest (e.g. average daily rainfall per month anomaly for some future period relative to a baseline period), which will typically involve aggregation to a lower resolution (such as from daily data to monthly averages), and only then combine the aggregate data from multiple data sets to compute statistically interesting properties (such as average among models and standard deviation, etc). Once data is aggregated from daily or higher-resolution values to a lower temporal resolution - such as a "month" - the different calendars no longer matter (although if you do need to convert averaged data (e.g. average daily precipitation in a month) to absolute data (e.g. precipitation per month) you should use `CFfactor_units()` to make sure that you use the correct scaling factor). Otherwise, there really shouldn't be any reason to convert the time series in the data files to `Date`s. Climate projection data is virtually never compared on a day-to-day basis between different models and neither does complex date arithmetic make much sense (such as adding intervals) - `CFtime` can support basic arithmetic by manipulating the offsets of the `CFTime` object. The character representations that are produced are perfectly fine to use for `dimnames()` on an array or as `rownames()` in a `data.frame` and these also support basic logical operations such as `"2023-02-30" < "2023-03-01"`. So ask yourself, do you really need `Date`s when working with unprocessed climate projection data? (If so, [open an issue on GitHub](https://github.com/pvanlaake/CFtime/issues)). A complete example of creating a multi-model ensemble is provided in the vignette ["Processing climate projection data"](Processing.html). ## Final observations - This package is intended to facilitate processing of climate projection data. It is not a full implementation of the CF Metadata Conventions "time" component. - In parsing and deparsing of offsets and timestamps, data is rounded to 3 digits of precision of the unit of the calendar. When using a description of time that is very different from the calendar unit, this may lead to some loss of precision due to rounding errors. For instance, if milli-second precision is required, use a unit of "seconds".
The authors have no knowledge of published climate projection data that requires milli-second precision so for the intended use of the package this issue is marginal. CFtime/NAMESPACE 0000644 0001762 0000144 00000001263 14732102433 012641 0 ustar ligges users # Generated by roxygen2: do not edit by hand S3method("+",CFTime) S3method("==",CFTime) S3method(as.character,CFTime) S3method(cut,CFTime) S3method(length,CFTime) S3method(range,CFTime) export("bounds<-") export(CFTime) export(CFcomplete) export(CFfactor) export(CFfactor_coverage) export(CFfactor_units) export(CFmonth_days) export(CFparse) export(CFsubset) export(CFtime) export(CFtimestamp) export(as_timestamp) export(bounds) export(calendar) export(definition) export(indexOf) export(is_complete) export(month_days) export(offsets) export(origin) export(parse_timestamps) export(resolution) export(slab) export(slice) export(timezone) export(unit) import(R6) importFrom(stats,na.omit) CFtime/TODO 0000644 0001762 0000144 00000000732 14760424753 012127 0 ustar ligges users # every release - update documentation: README, vignettes, package doc - update hyperlinks (to e.g. latest CF version) # 1.6 - Climatology - Paleocalendar - equidistant by element and calendar # Further away - cut() needs to support "2 days" etc - filter() like tidyverse -> CFsubset - CFfactor() can use epochs with lower resolution such as [year, month] for 3-hr data: e.g. daily avg per month per epoch - Epoch factor periods should have climatological CFtime CFtime/LICENSE 0000644 0001762 0000144 00000000061 14734460247 012436 0 ustar ligges users YEAR: 2022-2025 COPYRIGHT HOLDER: CFtime authors CFtime/NEWS.md 0000644 0001762 0000144 00000016543 14761414370 012537 0 ustar ligges users # CFtime 1.5.1 * `CFTime$slice()` can now work with a single or multiple timestamp values. * Test coverage increased. * Documentation updated, all runnable example code uses `ncdfCF` package. * Minor code fixes. # CFtime 1.5.0 * Code is updated to align with the CF 1.12 release. Specifically, calendars `utc` and `tai` are added. * `standard` calendar now uses mixed Gregorian/Julian calendar as defined in the CF Metadata Conventions. `proleptic_gregorian` is now a separate calendar with its own code base. * Negative offsets from a calendar origin are allowed. * Code is refactored to R6. R6 class CFTime replaces S4 class CFtime (note the difference in case). S4 class CFdatum has been replaced by hierarchy of R6 CFCalendar classes, with various non-exported functions converted into methods of CFCalendar. The code is now much cleaner and easier to extend, such as with the two new calendars `utc` and `tai`. * ISO8601 timestamps can use a comma "," as decimal mark to separate a fractional part from a time element, in addition to the dot ".". * Do not drop degenerate dimension on bounds when only 1 offset is included in subsetting. # CFtime 1.4.1 * Method `slab()` has an additional argument `rightmost.closed` to indicate if the upper extreme value should be included in the result. * Fixed bounds information on CFtime instances returned from methods `indexOf()` and `slab()`. * Several minor code improvements. # CFtime 1.4.0 * Bounds that define intervals around offsets can be associated with a CFtime instance and retrieved as raw offset values or as formatted timestamps. * Methods that subset a CF time series (e.g. `CFfactor()`, `cut()`, `slab()`) now have an attribute "CFtime" (among possible others) that describes the "time" dimension of the analysis result applying the subset. 
In other words, if CFtime instance 'Acf' describes the temporal dimension of data set 'A' and a factor 'Af' is generated from 'Acf', then `Bcf <- attr(Af, "CFtime")` describes the temporal dimension of the result of, say, `B <- apply(A, 1:2, tapply, Af, FUN)`. * New `indexOf()` method added that returns the indices of supplied timestamps in a CFtime instance, optionally with a fractional part. This can be used to extract specific time steps, or to interpolate between time steps using the fractional part, from the time dimension of the data set associated with the CFtime instance. A vector of indices (e.g. referring to slices of the data set) can also be supplied, in which case valid indices are returned, with the new CFtime instance. * New `cut()` method added to generate a factor, similar to `cut.POSIXt()` but with some differences in the arguments. * `CFfactor()` now supports a period "quarter", for calendar quarters. * `format()` method added that generates a character vector of timestamps for the offsets in a CFtime instance. The format is specified using the flags used in `strptime()`, with some limitations. In particular, locale-specific formatting is limited to month names and no weekday information can be generated. The `range()` method has a new "format" parameter to support the same functionality and timestamps can also be generated for the extremes of the bounds, if set. * `as_character()` and `length()` methods added that return a vector of timestamps or the number of offsets in a CFtime instance, respectively. * Several functions have been renamed (most notably `CFtimestamp()` to `as_timestamp()`, `CFcomplete()` to `is_complete()`, `CFrange()` to the standard generic method `range()`, and `CFsubset()` to `slab()`) to be more consistent with the R universe. The original functions are now flagged as being deprecated. Some datum functions (deep down where regular mortals do not dwell) have been deleted. * Time zone designator "UTC" accepted when parsing timestamps to offsets. * Minor code fixes, see GitHub commits. * Documentation updated, with description of new functions. # CFtime 1.3.0 * Two CFtime instances can be added if they have compatible calendars and units. The earlier origin is preserved in the result and offsets from the later instance are incremented by the appropriate amount. As before, in the result offsets are in the order of the specified CFtime instances. * A new function CFsubset() can be used to retrieve a logical vector that indicates which time steps in a CFtime instance fall between two timestamps. This is useful to slice data during reading from file or analysis. * Time zone information is managed at the level of the datum. If a vector of character timestamps is parsed and found to have different time zones, a warning is generated. * Much reduced memory footprint. * Minor code fixes, see GitHub commits. * Codecov.io test coverage monitoring added. * Documentation updated, with description of new functions. # CFtime 1.2.0 * Datum units "years" and "months" added. While these units are discouraged by the CF Metadata Conventions due to their problematic definition, there are quite a few data sets out there that use these units nonetheless. For this reason, reading existing files with such datum units is supported (converting offsets to time elements is easy) but parsing timestamps is not (calculating offsets from time elements is possible but tedious and slow). 
Should there be a definite need, open an issue on GitHub and make a *very good* case why this functionality is required. * CFresolution() returns the average separation between elements in a time series, in units of the datum. * CFcomplete() indicates if the time series is complete, meaning that there are no gaps in the time series. This also works for time series with a somewhat variable length such as monthly data with a "days" datum unit. This works for all but the most exotic time dimension constructions. * CFtimestamp() produces a timestamp for all midnight values if the datum unit is "hours", "minutes" or "seconds". The "time" format has been removed. For "standard", "gregorian" and "proleptic_gregorian" calendars output can be generated as POSIXct by specifying the new argument `asPOSIX = TRUE` -- defaults to `FALSE`, the previous behaviour so the API is not broken. * Minor documentation updates. * Assorted minor code fixes, see GitHub commits. # CFtime 1.1.0 * CFtime() can now also be invoked with a vector of character timestamps as offsets, or with a single timestamp to create a complete time series from the datum to the indicated timestamp. * CFtimestamp() can now automatically select the best format for the time series. * New CFfactor_units() and CFfactor_coverage() functions. CFfactor_units() will tell you how many time units compose every level of a factor. CFfactor_coverage() computes the actual or relative number of time units in the factor levels from the time series in a CFtime instance with which the factor was created. This will enable you to assess the completeness of your time series (and perhaps filter out factor levels below a certain coverage threshold) and it can be useful in computing absolute values from average values, as is often useful when computing anomalies. * Global constants are now defined in a package environment, CFt. * Documentation expanded, updated and fixed. * Assorted minor code fixes, see GitHub commits. # CFtime 1.0.0 * This version supports all CF Metadata Conventions calendars for use with climate projection data. * You can create timestamps from the offsets in the files and create factors that greatly simplify working with climate change data. 
CFtime/inst/ 0000755 0001762 0000144 00000000000 14761414434 012406 5 ustar ligges users CFtime/inst/doc/ 0000755 0001762 0000144 00000000000 14761414434 013153 5 ustar ligges users CFtime/inst/doc/Conformance.R 0000644 0001762 0000144 00000000217 14761414433 015527 0 ustar ligges users ## ----include = FALSE---------------------------------------------------------- knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) CFtime/inst/doc/Processing.R 0000644 0001762 0000144 00000010365 14761414434 015417 0 ustar ligges users ## ----include = FALSE---------------------------------------------------------- knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ## ----setup, include = FALSE--------------------------------------------------- library(CFtime) ## ----------------------------------------------------------------------------- # Setting up library(ncdfCF) fn <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)[1] (ds <- ncdfCF::open_ncdf(fn)) # The T axis, with name "time" has a CFTime instance t <- ds[["time"]]$time() # Create monthly factors for a baseline era and early, mid and late 21st century eras baseline <- t$factor(era = 1991:2020) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) str(baseline) str(future) ## ----------------------------------------------------------------------------- # Get the data for the "pr" data variable from the netCDF data set. # The `CFData$array()` method ensures that data are in standard R orientation. # Converts units of kg m-2 s-1 to mm/day. pr <- ds[["pr"]]$data()$array() * 86400 # Get a global attribute from the file experiment <- ds$attribute("experiment_id") # Calculate the daily average precipitation per month for the baseline period # and the three future eras. pr_base <- apply(pr, 1:2, tapply, baseline, mean) # an array pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) # a list of arrays # Calculate the precipitation anomalies for the future eras against the baseline. # Working with daily averages per month so we can simply subtract and then multiply by days # per month for each of the factor levels using the CF calendar. 
ano <- mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE) # Plot the results plot(1:12, ano$early[,1,1], type = "o", col = "blue", ylim = c(-50, 40), xlim = c(1, 12), main = paste0("Hamilton, New Zealand\nExperiment: ", experiment), xlab = "month", ylab = "Precipitation anomaly (mm)") lines(1:12, ano$mid[,1,1], type = "o", col = "green") lines(1:12, ano$late[,1,1], type = "o", col = "red") ## ----------------------------------------------------------------------------- # Get the list of files that make up the ensemble members, here: # GFDL ESM4 and MRI ESM2 models for experiment SSP2-4.5, precipitation, CMIP6 2015-01-01 to 2099-12-31 lf <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE) # Loop over the files individually # ano is here a list with each element holding the results for a single model ano <- lapply(lf, function(fn) { ds <- ncdfCF::open_ncdf(fn) t <- ds[["time"]]$time() pr <- ds[["pr"]]$data()$array() * 86400 baseline <- t$factor(era = 1991:2020) pr_base <- apply(pr, 1:2, tapply, baseline, mean) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE) }) # Era names eras <- c("early", "mid", "late") dim(eras) <- 3 # Build the ensemble for each era # For each era, grab the data for each of the ensemble members, simplify to an array # and take the mean per row (months, in this case) ensemble <- apply(eras, 1, function(e) { rowMeans(sapply(ano, function(a) a[[e]], simplify = TRUE))}) colnames(ensemble) <- eras rownames(ensemble) <- rownames(ano[[1]][[1]]) ensemble ## ----eval = FALSE------------------------------------------------------------- # library(ncdfCF) # library(abind) # # prepare_CORDEX <- function(fn, var, aoi) { # data <- vector("list", length(fn)) # for (i in 1:length(fn)) { # ds <- ncdfCF::open_ncdf(fn[i]) # if (i == 1) { # # Get a CFTime instance from the first file # t <- ds[["time"]]$time() # } else { # # Add offsets from the file and add to the CFTime instance # t <- t + ds[["time"]]$time()$offsets # } # # # Put the subsetted data array in the list # data[[i]] <- ds[[var]]$subset(aoi = aoi)$array() # } # # # Create a list for output with the CFTime instance and # # the data bound in a single 3-dimensional array # list(CFTime = t, data = abind(data, along = 3)) # } CFtime/inst/doc/Conformance.Rmd 0000644 0001762 0000144 00000014321 14741270365 016053 0 ustar ligges users --- title: "Conformance with the CF Metadata Conventions" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Conformance with the CF Metadata Conventions} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` `CFtime` is based on version 1.12 of the CF Metadata Conventions. The text for the *time coordinate* in the conventions may be consulted [here](https://cfconventions.org/Data/cf-conventions/cf-conventions-1.12/cf-conventions.html#time-coordinate). > The *time coordinate* is one of four coordinate types that receive "special treatment" in the conventions. The other three are longitude, latitude and the vertical. 
If you require convention-compliant support for any of these three other coordinate types, please consider using package [`ncdfCF`](https://cran.r-project.org/package=ncdfCF) which supports all three coordinate types and links with `CFtime` for support of the time coordinate. This document sets out how the `CFtime` package conforms to the CF Metadata Conventions, by section of the conventions. This information is mostly useful for developers and expert users. If you have issues reading a netCDF file that is due to conformance of package `CFtime` with the CF Metadata Conventions, please [open an issue on GitHub](https://github.com/pvanlaake/CFtime/issues). Please note that there are many netCDF files out there that are not claiming adherence to the CF Metadata Conventions but whose time coordinate can still be successfully handled by `CFtime`: the `netCDF` library itself provides the basic plumbing. ## 4.4. Time Coordinate A `CFTime` object is constructed from information that is contained in the `units` and `calendar` attributes for a time coordinate read out of a netCDF file. The package does not actually access the netCDF file, it only uses the information that was read out of the file by e.g. `ncdfCF`. Consequently, the `CFtime` package can also construct a `CFTime` object from suitable character strings. This package is agnostic to the orientation of the axes in any data variable that references the time coordinate. Consequently, the `standard_name` and `axis` attributes are not considered by this package (but the `ncdfCF` package handles both). Identification of a time coordinate is done by the `units` attribute, alone. ## 4.4.1. Time Coordinate Units The `CFtime` package fully supports the units `"second"`, `"minute"`, `"hour"` and `"day"`, including abbreviated and/or plural forms. Unit `"second"` is the SI second, a `"minute"` equals 60 seconds, an `"hour"` equals 3,600 seconds, and a `"day"` equals 86,400 seconds. This is exactly as expected, but refer to the `utc` calendar, below, for peculiarities of that calendar. The `units` `"month"` and `"year"` are accepted on input but not using their definition in UDUNITS. Instead, `"year"` is a calendar year, so either 360, 365 or 366 days depending on its value and the calendar. A `"month"` is similarly a calendar month. Use of either of these time units is discouraged by the CF Metadata Conventions. Other UDUNITS time units are not supported by this package. All variants of the glue word `"since"` are accepted, being `"after"`, `"from"`, `"ref"` and `"per"`. The *"reference datetime string"* should be formatted using the UDUNITS broken timestamp format or following [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) rules, but noting that datetimes valid in specific calendars other than Gregorian (such as `2023-02-30` in the `360_day` calendar) are acceptable as well. The UDUNITS "packed" format is not supported. Timezone information can only use `00`, `15`, `30` and `45` to indicate minutes; other minute offsets have never been used anywhere. A time zone value of `"UTC"` is accepted, as an extension to the conventions. Even though the conventions don't indicate it, the `tai` and `utc` calendars can carry no time zone indication as that does not exist for either of these calendars. ## 4.4.2. Calendar If a `calendar` attribute is not given, `"standard"` is assumed, being the default calendar as per the conventions. * `standard` (or the deprecated `gregorian`): Fully conformant, but leap seconds are never considered (see below). 
The combination of a *reference datetime* and other *datetimes* spanning the gap between 1582-10-05 and 1582-10-15, in either direction, is supported. * `proleptic_gregorian`: Fully conformant, but leap seconds are never considered (see below). * `julian`: Fully conformant, but, despite the suggestion in the conventions, leap seconds do not exist in this calendar and are thus never considered. * `utc`: Fully conformant. Leap seconds are always accounted so when a leap second is included, UTC time progresses like `23:59:58 ... 23:59:59 ... 23:59:60 ... 00:00:00 ... 00:00:01`. This also extends to minutes `23:59:00 ... 23:59:60 ... 00:00:59 ... 00:01:59`, always adding 60 seconds. Likewise for `hours` and `days`. Units `"year"` and `"month"` are not allowed, and neither is any time zone indication. * `tai`: Fully conformant. Units `"year"` and `"month"` are not allowed, and neither is any time zone indication. * `no_leap` / `365_day`: Fully conformant. * `all_leap` / `366_day`: Fully conformant. * `360_day`: Fully conformant. * `none`: Not implemented. ## 4.4.3. Leap seconds The `utc` calendar fully supports leap seconds. The `julian` calendar has no concept of leap seconds so these are never possible or considered. Using a leap second in a `julian` calendar is an error. In the `standard` and `proleptic_gregorian` calendars only the variant without leap seconds is considered. The `units_metadata` attribute is not considered, so assumed to be `"leap seconds: unknown"`. The assumption here is that if second accuracy for a data producer is essential, then the entire tool chain from observation equipment, to processing, to file recording will have to be of known characteristics with regards to UTC time and leap seconds and thus the `utc` calendar would be used, rather than `standard` or `proleptic_gregorian` with a caveat communicated through the `units_metdata` attribute. ## 4.4.4. Time Coordinates with no Annual Cycle Not implemented. ## 4.4.5. Explicitly Defined Calendar Not implemented. CFtime/inst/doc/Processing.Rmd 0000644 0001762 0000144 00000035550 14760342033 015735 0 ustar ligges users --- title: "Processing climate projection data" output: rmarkdown::html_vignette vignette: > %\VignetteIndexEntry{Processing climate projection data} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} --- ```{r, include = FALSE} knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) ``` ```{r setup, include = FALSE} library(CFtime) ``` Climate projection data sets are produced in a variety of formats but all conform to the CF Metadata Conventions. NetCDF data files, in particular, are highly structured and relatively easy to process. That said, it is very important to maintain a proper processing workflow such that the small changes in the climate projections are maintained and revealed through analysis. In this document, the basic workflow with varying calendars is described. > In this vignette we are using the `ncdfCF` package as that provides the easiest interface to work with netCDF files. Package `CFtime` is integrated into `ncdfCF` which makes working with time dimensions in netCDF seamless. > Packages `RNetCDF` and `ncdf4` can work with `CFtime` as well but then the "intelligence" built into `ncdfCF` is not available, such as automatically identifying axes and data orientation. 
Other packages like `terra` and `stars` are not recommended because they do not provide access to the specifics of the time dimension of the data and do not consider any calendars other than "proleptic_gregorian". ## Processing climate projection data Individual files containing climate projections contain global, regional or local data, typically on a rectangular latitude-longitude grid, for a single parameter such as "near-surface temperature", and for a number of time steps. An analysis workflow then consists of a number of steps: - Download the appropriate data files for your desired combination of model, experiment, realization, geography, time range, parameter, ... (called a "data suite" henceforth). If your analysis involves multiple parameters (such as temperature and precipitation to estimate crop productivity), repeat the process for all parameters. If you want to make a multi-model ensemble to reduce model bias, repeat again for all desired model, experiment and realization combinations ("ensemble member"). You end up having one or more data suites to work with. - Take all files in a data suite and extract the data. Process the data in the data suite. Since the data are (mostly) 3-dimensional arrays, this will involve binding the arrays on the right dimension and then do something like `apply(data, 1:2, tapply, f, fun)` (following the CF Metadata Conventions, dimensions 1 and 2 are "longitude" and "latitude", respectively; the third dimension is "time"; none of this is standardized though and deviations are out there; package `ncdfCF` can help with the `CFData$array()` method). Repeat for the data suite for each ensemble member. - Combine the above results as your workflow requires. Frequently this involves computing "anomalies": ratio the data for one or more future periods to a baseline period. Repeat for each ensemble member. - Construct the multi-model ensemble from the individual ensemble members. Apart from the first step of obtaining the data, the steps lend themselves well to automation. The catch, however, is in the factor `f` to use with `tapply()`. The different models (in your ensemble) use different calendars, meaning that different factors are required. The `CFtime` package can help out. The `CFTime$factor()` method produces a factor that respects the calendar of the data files. The method comes in two operating modes: - Plain vanilla mode produces a factor for a time period across the entire time series. The factor level includes the year. This would be useful to calculate mean temperature for every month in every year, for instance. - When one or more "eras" (periods of interest) are provided, the factor level no longer includes the year and can be used to calculate, for instance, the mean temperature per period of interest in the era (e.g. average March temperature in the era 2041-2060). 
```{r} # Setting up library(ncdfCF) fn <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)[1] (ds <- ncdfCF::open_ncdf(fn)) # The T axis, with name "time" has a CFTime instance t <- ds[["time"]]$time() # Create monthly factors for a baseline era and early, mid and late 21st century eras baseline <- t$factor(era = 1991:2020) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) str(baseline) str(future) ``` Building on the examples above of opening a file, creating a `CFTime` instance and a suitable factor for one data suite, here daily rainfall, the actual processing of the data into precipitation anomalies for 3 periods relative to a baseline period could look like this: ```{r} # Get the data for the "pr" data variable from the netCDF data set. # The `CFData$array()` method ensures that data are in standard R orientation. # Converts units of kg m-2 s-1 to mm/day. pr <- ds[["pr"]]$data()$array() * 86400 # Get a global attribute from the file experiment <- ds$attribute("experiment_id") # Calculate the daily average precipitation per month for the baseline period # and the three future eras. pr_base <- apply(pr, 1:2, tapply, baseline, mean) # an array pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) # a list of arrays # Calculate the precipitation anomalies for the future eras against the baseline. # Working with daily averages per month so we can simply subtract and then multiply by days # per month for each of the factor levels using the CF calendar. ano <- mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE) # Plot the results plot(1:12, ano$early[,1,1], type = "o", col = "blue", ylim = c(-50, 40), xlim = c(1, 12), main = paste0("Hamilton, New Zealand\nExperiment: ", experiment), xlab = "month", ylab = "Precipitation anomaly (mm)") lines(1:12, ano$mid[,1,1], type = "o", col = "green") lines(1:12, ano$late[,1,1], type = "o", col = "red") ``` Looks like Hadley will be needing rubber boots in spring and autumn back home! The interesting feature, working from opening the netCDF file down to plotting, is that the specifics of the CF calendar that the data suite uses do not have to be considered anywhere in the processing workflow: the `CFtime` package provides the functionality. Data suites using another CF calendar are processed exactly the same. ## Combining data from different models with different calendars Different climate projection data sets can use different calendars. It is absolutely essential to respect the calendar of the different data sets because the underlying solar and atmospheric physics are based on those calendars as well. In a typical situation, a researcher would construct a multi-model ensemble to remove or reduce the bias in any given model. The data sets composing the ensemble might well use different calendars. The correct way of constructing an ensemble is to perform the desired analysis on every ensemble member individually and to combine them only in the final step and to then perform any ensemble operations such as computing confidence intervals. The design of the `CFtime` package makes it easy to do this, through its heavy use of lists. Building on the previous example, let's make a multi-model ensemble of 2 models (not much of an ensemble but such are the limitations of including data with packages - the example easily extends to a larger set of ensemble members). 
```{r} # Get the list of files that make up the ensemble members, here: # GFDL ESM4 and MRI ESM2 models for experiment SSP2-4.5, precipitation, CMIP6 2015-01-01 to 2099-12-31 lf <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE) # Loop over the files individually # ano is here a list with each element holding the results for a single model ano <- lapply(lf, function(fn) { ds <- ncdfCF::open_ncdf(fn) t <- ds[["time"]]$time() pr <- ds[["pr"]]$data()$array() * 86400 baseline <- t$factor(era = 1991:2020) pr_base <- apply(pr, 1:2, tapply, baseline, mean) future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080)) pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE) }) # Era names eras <- c("early", "mid", "late") dim(eras) <- 3 # Build the ensemble for each era # For each era, grab the data for each of the ensemble members, simplify to an array # and take the mean per row (months, in this case) ensemble <- apply(eras, 1, function(e) { rowMeans(sapply(ano, function(a) a[[e]], simplify = TRUE))}) colnames(ensemble) <- eras rownames(ensemble) <- rownames(ano[[1]][[1]]) ensemble ``` Here we simply compute the average of the monthly precipitation anomaly over the ensemble members. In a more typical scenario, you would use the values from the individual models and to apply a more suitable analysis, such as calculating the confidence interval or model agreement. One significant advantage of this processing workflow is that it is easily parallelized: the bulk of the work goes into computing the anomalies, `ano`, and this is [embarrassingly parallel](https://en.wikipedia.org/wiki/Embarrassingly_parallel) because they read their own data and produce independent outputs. Use [package future](https://cran.r-project.org/package=future) or something similar to easily make the code run on all available CPU cores. ## Working with multiple files in a single data suite Due to the large size of typical climate projection data files, it is common to have a data suite that is contained in multiple files. A case in point is the CORDEX data set which breaks up the experiment period of 2006 - 2100 into 19 files of 5 years each, with each file covering a single parameter (temperature, precipitation, etc) over an entire domain (such as Europe, South Asia, Central America and the Caribbean, etc). The CFtime package can streamline processing of such multi-file data suites as well. Assuming that you have your CORDEX files in a directory on disk, organized by domain and other properties such as the variable, GCM/RCM combination, experiment, etc, the process of preparing the files for processing could be encoded in a function as below. The argument `fn` is a list of file names to process, and `var` is the variable contained in the files. (There are no checks on argument sanity here, which should really be included. This function only makes sense for a single [domain, GCM/RCM, experiment, variable] combination. Also be aware of data size, CORDEX files are huge and stitching all domain data together will easily exhaust available memory and it may thus lead to very large swap files and very poor performance - use the `ncdfCF::CFVariable$subset()` method to read spatial or temporal chunks of data to avoid such problems.) 
```{r eval = FALSE} library(ncdfCF) library(abind) prepare_CORDEX <- function(fn, var, aoi) { data <- vector("list", length(fn)) for (i in 1:length(fn)) { ds <- ncdfCF::open_ncdf(fn[i]) if (i == 1) { # Get a CFTime instance from the first file t <- ds[["time"]]$time() } else { # Add offsets from the file and add to the CFTime instance t <- t + ds[["time"]]$time()$offsets } # Put the subsetted data array in the list data[[i]] <- ds[[var]]$subset(aoi = aoi)$array() } # Create a list for output with the CFTime instance and # the data bound in a single 3-dimensional array list(CFTime = t, data = abind(data, along = 3)) } ``` Calling this function like `prepare_CORDEX(list.files(path = "~/CC/CORDEX/CAM", pattern = "^pr.*\\.nc$", full.names = TRUE), "pr", ncdfCF::aoi(0, 20, 30, 50))` will yield a list of netCDF files with precipitation data, with the resulting `CFTime` instance describing the full temporal extent covered by the data files, as well as the data bound on the temporal dimension, ready for further processing. When working like this it is imperative that the offsets and the data arrays are added to their final structures *in exactly the same order*. It is not necessary that the offsets (and the data) themselves are in order, but the correspondence between offsets and data needs to be maintained. (`list.files()` produces a list in alphabetical order by default, which for most climate projection files produces offsets in chronological order.) ## Acknowledgements The results presented contain modified data from Copernicus Climate Change Service information, 2023-2025. Neither the European Commission nor ECMWF is responsible for any use that may be made of the Copernicus information or data it contains. We acknowledge the World Climate Research Programme, which, through its Working Group on Coupled Modelling, coordinated and promoted CMIP6. We thank the climate modeling groups for producing and making available their model output, the Earth System Grid Federation (ESGF) for archiving the data and providing access, and the multiple funding agencies who support CMIP6 and ESGF. The two data sets used as examples in this vignette carry the following license statements: - **pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701.nc:** CMIP6 model data produced by NOAA-GFDL is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Consult https://pcmdi.llnl.gov/CMIP6/TermsOfUse/ for terms of use governing CMIP6 output, including citation requirements and proper acknowledgment. Further information about this data, including some limitations, can be found via the further_info_url (recorded as a global attribute in this file). The data producers and data providers make no warranty, either express or implied, including, but not limited to, warranties of merchantability and fitness for a particular purpose. All liabilities arising from the supply of the information (including any liability arising in negligence) are excluded to the fullest extent permitted by law. - **pr_day_MRI-ESM2-0_ssp245_r1i1p1f1_gn_20150101-20991231_v20190603.nc:** CMIP6 model data produced by MRI is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Consult https://pcmdi.llnl.gov/CMIP6/TermsOfUse/ for terms of use governing CMIP6 output, including citation requirements and proper acknowledgment. 
Further information about this data, including some limitations, can be found via the further_info_url (recorded as a global attribute in this file). The data producers and data providers make no warranty, either express or implied, including, but not limited to, warranties of merchantability and fitness for a particular purpose. All liabilities arising from the supply of the information (including any liability arising in negligence) are excluded to the fullest extent permitted by law. CFtime/inst/doc/Conformance.html 0000644 0001762 0000144 00000030431 14761414433 016273 0 ustar ligges users
CFtime
is based on version 1.12 of the CF Metadata
Conventions. The text for the time coordinate in the
conventions may be consulted here.
The time coordinate is one of four coordinate types that receive "special treatment" in the conventions. The other three are longitude, latitude and the vertical. If you require convention-compliant support for any of these three other coordinate types, please consider using package
ncdfCF
which supports all three coordinate types and links withCFtime
for support of the time coordinate.
This document sets out how the CFtime
package conforms
to the CF Metadata Conventions, by section of the conventions. This
information is mostly useful for developers and expert users.
If you have issues reading a netCDF file that is due to conformance
of package CFtime
with the CF Metadata Conventions, please
open an issue on
GitHub.
Please note that there are many netCDF files out there that are not
claiming adherence to the CF Metadata Conventions but whose time
coordinate can still be successfully handled by CFtime
: the
netCDF
library itself provides the basic plumbing.
A CFTime
object is constructed from information that is
contained in the units
and calendar
attributes
for a time coordinate read out of a netCDF file. The package does not
actually access the netCDF file, it only uses the information that was
read out of the file by e.g. ncdfCF
. Consequently, the
CFtime
package can also construct a CFTime
object from suitable character strings.
This package is agnostic to the orientation of the axes in any data
variable that references the time coordinate. Consequently, the
standard_name
and axis
attributes are not
considered by this package (but the ncdfCF
package handles
both). Identification of a time coordinate is done by the
units
attribute, alone.
The CFtime
package fully supports the units
"second"
, "minute"
, "hour"
and
"day"
, including abbreviated and/or plural forms. Unit
"second"
is the SI second, a "minute"
equals
60 seconds, an "hour"
equals 3,600 seconds, and a
"day"
equals 86,400 seconds. This is exactly as expected,
but refer to the utc
calendar, below, for peculiarities of
that calendar.
The units
"month"
and "year"
are accepted on input but not using their definition in UDUNITS.
Instead, "year"
is a calendar year, so either 360, 365 or
366 days depending on its value and the calendar. A "month"
is similarly a calendar month. Use of either of these time units is
discouraged by the CF Metadata Conventions.
Other UDUNITS time units are not supported by this package.
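To make the calendar-based reading of these discouraged units concrete, here is a minimal sketch, assuming the `CFtime()` constructor and `as_timestamp()` function described elsewhere in this documentation:

```r
# Sketch: offsets in a "months since" unit advance by calendar months, not fixed durations
library(CFtime)
t <- CFtime("months since 2001-01-01", "standard", 0:23)  # two years of monthly steps
as_timestamp(t)[1:3]   # expected: "2001-01-01" "2001-02-01" "2001-03-01"
```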
All variants of the glue word "since"
are accepted,
being "after"
, "from"
, "ref"
and
"per"
.
The "reference datetime string" should be formatted using
the UDUNITS broken timestamp format or following ISO8601 rules, but
noting that datetimes valid in specific calendars other than Gregorian
(such as 2023-02-30
in the 360_day
calendar)
are acceptable as well. The UDUNITS "packed" format is not
supported.
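A minimal sketch of such a calendar-specific reference datetime, using the same `CFtime()` and `as_timestamp()` API (the commented dates are the expected results):

```r
# Sketch: an origin of 2023-02-30 is valid in the 360_day calendar
library(CFtime)
t <- CFtime("days since 2023-02-30", "360_day", 0:29)
as_timestamp(t)[c(1, 30)]   # expected: "2023-02-30" "2023-03-29"
```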
Timezone information can only use 00
, 15
,
30
and 45
to indicate minutes; other minute
offsets have never been used anywhere. A time zone value of
"UTC"
is accepted, as an extension to the conventions. Even
though the conventions don't indicate it, the tai
and
utc
calendars can carry no time zone indication as that
does not exist for either of these calendars.
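A small sketch of the "UTC" designator extension, assuming the constructor accepts it in the units string:

```r
# Sketch: "UTC" as the time zone designator of the reference datetime is accepted
library(CFtime)
t <- CFtime("hours since 2020-01-01 00:00:00 UTC", "standard", 0:23)
```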
If a calendar
attribute is not given,
"standard"
is assumed, being the default calendar as per
the conventions.
standard
(or the deprecated gregorian
):
Fully conformant, but leap seconds are never considered (see below). The
combination of a reference datetime and other
datetimes spanning the gap between 1582-10-05 and 1582-10-15,
in either direction, is supported.proleptic_gregorian
: Fully conformant, but leap seconds
are never considered (see below).julian
: Fully conformant, but, despite the suggestion
in the conventions, leap seconds do not exist in this calendar and are
thus never considered.utc
: Fully conformant. Leap seconds are always
accounted so when a leap second is included, UTC time progresses like
23:59:58 ... 23:59:59 ... 23:59:60 ... 00:00:00 ... 00:00:01
.
This also extends to minutes
23:59:00 ... 23:59:60 ... 00:00:59 ... 00:01:59
, always
adding 60 seconds. Likewise for hours
and
days
. Units "year"
and "month"
are not allowed, and neither is any time zone indication.tai
: Fully conformant. Units "year"
and
"month"
are not allowed, and neither is any time zone
indication.no_leap
/ 365_day
: Fully conformant.all_leap
/ 366_day
: Fully conformant.360_day
: Fully conformant.none
: Not implemented.The utc
calendar fully supports leap seconds.
The julian
calendar has no concept of leap seconds so
these are never possible or considered. Using a leap second in a
julian
calendar is an error.
In the standard
and proleptic_gregorian
calendars only the variant without leap seconds is considered. The
units_metadata
attribute is not considered, so assumed to
be "leap seconds: unknown"
. The assumption here is that if
second accuracy for a data producer is essential, then the entire tool
chain from observation equipment, to processing, to file recording will
have to be of known characteristics with regards to UTC time and leap
seconds and thus the utc
calendar would be used, rather
than standard
or proleptic_gregorian
with a
caveat communicated through the units_metdata
attribute.
4.4.4. Time Coordinates with no Annual Cycle: Not implemented.
4.4.5. Explicitly Defined Calendar: Not implemented.
Climate projection data sets are produced in a variety of formats but all conform to the CF Metadata Conventions. NetCDF data files, in particular, are highly structured and relatively easy to process. That said, it is very important to maintain a proper processing workflow such that the small changes in the climate projections are maintained and revealed through analysis. In this document, the basic workflow with varying calendars is described.
In this vignette we are using the `ncdfCF` package as that provides the easiest interface to work with netCDF files. Package `CFtime` is integrated into `ncdfCF` which makes working with time dimensions in netCDF seamless.
Packages `RNetCDF` and `ncdf4` can work with `CFtime` as well but then the "intelligence" built into `ncdfCF` is not available, such as automatically identifying axes and data orientation. Other packages like `terra` and `stars` are not recommended because they do not provide access to the specifics of the time dimension of the data and do not consider any calendars other than "proleptic_gregorian".
Individual files containing climate projections contain global, regional or local data, typically on a rectangular latitude-longitude grid, for a single parameter such as "near-surface temperature", and for a number of time steps. An analysis workflow then consists of a number of steps:
apply(data, 1:2, tapply, f, fun)
(following the CF Metadata Conventions, dimensions 1 and 2 are
"longitude" and "latitude", respectively; the third dimension is "time";
none of this is standardized though and deviations are out there;
package ncdfCF
can help with the
CFData$array()
method). Repeat for the data suite for each
ensemble member.Apart from the first step of obtaining the data, the steps lend
themselves well to automation. The catch, however, is in the factor
f
to use with tapply()
. The different models
(in your ensemble) use different calendars, meaning that different
factors are required. The CFtime
package can help out.
The CFTime$factor()
method produces a factor that
respects the calendar of the data files. The method comes in two
operating modes:
* Plain vanilla mode produces a factor for a time period across the entire time series. The factor level includes the year. This would be useful to calculate mean temperature for every month in every year, for instance.
* When one or more "eras" (periods of interest) are provided, the factor level no longer includes the year and can be used to calculate, for instance, the mean temperature per period of interest in the era (e.g. average March temperature in the era 2041-2060).
# Setting up
library(ncdfCF)
fn <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)[1]
(ds <- ncdfCF::open_ncdf(fn))
#> <Dataset> pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701
#> Resource : /private/var/folders/gs/s0mmlczn4l7bjbmwfrrhjlt80000gn/T/RtmpzkXN7W/Rinst111b86a3f8a11/CFtime/extdata/pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701.nc
#> Format : netcdf4
#> Type : generic netCDF data
#> Conventions: CF-1.7 CMIP-6.0 UGRID-1.0
#> Keep open : FALSE
#> Has groups : FALSE
#>
#> Variable:
#> name long_name units data_type axes
#> pr Precipitation kg m-2 s-1 NC_FLOAT lon, lat, time
#>
#> Axes:
#> id axis name long_name length values unit
#> 0 bnds vertex number 2 [1 ... 2]
#> 1 Y lat latitude 1 [-37.5] degrees_north
#> 2 T time 31025 [2015-01-01 12:00... days since 1850-0...
#> 3 X lon longitude 1 [175.625] degrees_east
#>
#> Attributes:
#> id name type length
#> 0 external_variables NC_CHAR 9
#> 1 history NC_CHAR 124
#> 2 table_id NC_CHAR 3
#> 3 activity_id NC_CHAR 11
#> 4 branch_method NC_CHAR 8
#> 5 branch_time_in_child NC_DOUBLE 1
#> 6 branch_time_in_parent NC_DOUBLE 1
#> 7 comment NC_CHAR 10
#> 8 contact NC_CHAR 32
#> 9 Conventions NC_CHAR 25
#> 10 creation_date NC_CHAR 20
#> 11 data_specs_version NC_CHAR 8
#> 12 experiment NC_CHAR 30
#> 13 experiment_id NC_CHAR 6
#> 14 forcing_index NC_INT 1
#> 15 frequency NC_CHAR 3
#> 16 further_info_url NC_CHAR 77
#> 17 grid NC_CHAR 94
#> 18 grid_label NC_CHAR 3
#> 19 initialization_index NC_INT 1
#> 20 institution NC_CHAR 112
#> 21 institution_id NC_CHAR 9
#> 22 license NC_CHAR 805
#> 23 mip_era NC_CHAR 5
#> 24 nominal_resolution NC_CHAR 6
#> 25 parent_activity_id NC_CHAR 4
#> 26 parent_experiment_id NC_CHAR 10
#> 27 parent_mip_era NC_CHAR 5
#> 28 parent_source_id NC_CHAR 9
#> 29 parent_time_units NC_CHAR 19
#> 30 parent_variant_label NC_CHAR 8
#> 31 physics_index NC_INT 1
#> 32 product NC_CHAR 12
#> 33 realization_index NC_INT 1
#> 34 realm NC_CHAR 5
#> 35 source NC_CHAR 560
#> 36 source_id NC_CHAR 9
#> 37 source_type NC_CHAR 18
#> 38 sub_experiment NC_CHAR 4
#> 39 sub_experiment_id NC_CHAR 4
#> 40 title NC_CHAR 82
#> 41 tracking_id NC_CHAR 49
#> 42 variable_id NC_CHAR 2
#> 43 variant_info NC_CHAR 3
#> 44 references NC_CHAR 30
#> 45 variant_label NC_CHAR 8
#> value
#> areacella
#> File was processed by fremetar (GFDL analog of ...
#> day
#> ScenarioMIP
#> standard
#> 60225
#> 60225
#> <null ref>
#> gfdl.climate.model.info@noaa.gov
#> CF-1.7 CMIP-6.0 UGRID-1.0
#> 2019-06-18T05:29:00Z
#> 01.00.27
#> update of RCP4.5 based on SSP2
#> ssp245
#> 1
#> day
#> https://furtherinfo.es-doc.org/CMIP6.NOAA-GFDL....
#> atmos data regridded from Cubed-sphere (c96) to...
#> gr1
#> 1
#> National Oceanic and Atmospheric Administration...
#> NOAA-GFDL
#> CMIP6 model data produced by NOAA-GFDL is licen...
#> CMIP6
#> 100 km
#> CMIP
#> historical
#> CMIP6
#> GFDL-ESM4
#> days since 1850-1-1
#> r1i1p1f1
#> 1
#> model-output
#> 1
#> atmos
#> GFDL-ESM4 (2018):\natmos: GFDL-AM4.1 (Cubed-sphe...
#> GFDL-ESM4
#> AOGCM AER CHEM BGC
#> none
#> none
#> NOAA GFDL GFDL-ESM4 model output prepared for C...
#> hdl:21.14100/48767401-8960-4864-8738-e64640bef71d
#> pr
#> N/A
#> see further_info_url attribute
#> r1i1p1f1
# The T axis, with name "time" has a CFTime instance
t <- ds[["time"]]$time()
# Create monthly factors for a baseline era and early, mid and late 21st century eras
baseline <- t$factor(era = 1991:2020)
future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080))
str(baseline)
#> Factor w/ 12 levels "01","02","03",..: 1 1 1 1 1 1 1 1 1 1 ...
#> - attr(*, "era")= int 30
#> - attr(*, "period")= chr "month"
str(future)
#> List of 3
#> $ early: Factor w/ 12 levels "01","02","03",..: NA NA NA NA NA NA NA NA NA NA ...
#> ..- attr(*, "era")= int 20
#> ..- attr(*, "period")= chr "month"
#> $ mid : Factor w/ 12 levels "01","02","03",..: NA NA NA NA NA NA NA NA NA NA ...
#> ..- attr(*, "era")= int 20
#> ..- attr(*, "period")= chr "month"
#> $ late : Factor w/ 12 levels "01","02","03",..: NA NA NA NA NA NA NA NA NA NA ...
#> ..- attr(*, "era")= int 20
#> ..- attr(*, "period")= chr "month"
Building on the examples above of opening a file, creating a
CFTime
instance and a suitable factor for one data suite,
here daily rainfall, the actual processing of the data into
precipitation anomalies for 3 periods relative to a baseline period
could look like this:
# Get the data for the "pr" data variable from the netCDF data set.
# The `CFData$array()` method ensures that data are in standard R orientation.
# Converts units of kg m-2 s-1 to mm/day.
pr <- ds[["pr"]]$data()$array() * 86400
# Get a global attribute from the file
experiment <- ds$attribute("experiment_id")
# Calculate the daily average precipitation per month for the baseline period
# and the three future eras.
pr_base <- apply(pr, 1:2, tapply, baseline, mean) # an array
pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean)) # a list of arrays
# Calculate the precipitation anomalies for the future eras against the baseline.
# Working with daily averages per month so we can simply subtract and then multiply by days
# per month for each of the factor levels using the CF calendar.
ano <- mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE)
# Plot the results
plot(1:12, ano$early[,1,1], type = "o", col = "blue", ylim = c(-50, 40), xlim = c(1, 12),
main = paste0("Hamilton, New Zealand\nExperiment: ", experiment),
xlab = "month", ylab = "Precipitation anomaly (mm)")
lines(1:12, ano$mid[,1,1], type = "o", col = "green")
lines(1:12, ano$late[,1,1], type = "o", col = "red")
Looks like Hadley will be needing rubber boots in spring and autumn back home!
The interesting feature, working from opening the netCDF file down to
plotting, is that the specifics of the CF calendar that the data suite
uses do not have to be considered anywhere in the processing workflow:
the CFtime
package provides the functionality. Data suites
using another CF calendar are processed exactly the same.
Different climate projection data sets can use different calendars. It is absolutely essential to respect the calendar of the different data sets because the underlying solar and atmospheric physics are based on those calendars as well.
In a typical situation, a researcher would construct a multi-model
ensemble to remove or reduce the bias in any given model. The data sets
composing the ensemble might well use different calendars. The correct
way of constructing an ensemble is to perform the desired analysis on
every ensemble member individually and to combine them only in the final
step and to then perform any ensemble operations such as computing
confidence intervals. The design of the CFtime
package
makes it easy to do this, through its heavy use of lists. Building on
the previous example, let's make a multi-model ensemble of 2 models (not
much of an ensemble but such are the limitations of including data with
packages - the example easily extends to a larger set of ensemble
members).
# Get the list of files that make up the ensemble members, here:
# GFDL ESM4 and MRI ESM2 models for experiment SSP2-4.5, precipitation, CMIP6 2015-01-01 to 2099-12-31
lf <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)
# Loop over the files individually
# ano is here a list with each element holding the results for a single model
ano <- lapply(lf, function(fn) {
ds <- ncdfCF::open_ncdf(fn)
t <- ds[["time"]]$time()
pr <- ds[["pr"]]$data()$array() * 86400
baseline <- t$factor(era = 1991:2020)
pr_base <- apply(pr, 1:2, tapply, baseline, mean)
future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080))
pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean))
mapply(function(pr, f) {(pr - pr_base) * t$factor_units(f)}, pr_future, future, SIMPLIFY = FALSE)
})
# Era names
eras <- c("early", "mid", "late")
dim(eras) <- 3
# Build the ensemble for each era
# For each era, grab the data for each of the ensemble members, simplify to an array
# and take the mean per row (months, in this case)
ensemble <- apply(eras, 1, function(e) {
rowMeans(sapply(ano, function(a) a[[e]], simplify = TRUE))})
colnames(ensemble) <- eras
rownames(ensemble) <- rownames(ano[[1]][[1]])
ensemble
#> early mid late
#> 01 -22.901333 -15.2675652 -1.6380748
#> 02 -25.430060 -21.4016013 -15.3427744
#> 03 -21.895792 -14.7434749 -22.4187823
#> 04 6.763958 6.9498244 15.2008249
#> 05 -2.635598 -15.3054439 -17.0106058
#> 06 -43.152012 -47.3442148 -32.1797467
#> 07 1.072942 10.4940924 3.9473240
#> 08 4.124084 -6.0917940 -12.9178847
#> 09 9.231852 -0.2038321 2.7198366
#> 10 5.148302 10.3044202 12.0060866
#> 11 16.186108 25.9156463 8.2168498
#> 12 -3.211510 -0.2036319 0.7604947
Here we simply compute the average of the monthly precipitation anomaly over the ensemble members. In a more typical scenario, you would use the values from the individual models and apply a more suitable analysis, such as calculating the confidence interval or model agreement.
One significant advantage of this processing workflow is that it is
easily parallelized: the bulk of the work goes into computing the
anomalies, ano
, and this is embarrassingly
parallel because they read their own data and produce independent
outputs. Use package
future or something similar to easily make the code run on all
available CPU cores.
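A minimal sketch of that parallelization, assuming the `future` and `future.apply` packages are available; it wraps the per-file anomaly computation from the previous code block in `future_lapply()`:

```r
library(future)
library(future.apply)

plan(multisession)   # one R worker per available core
ano <- future_lapply(lf, function(fn) {
  ds <- ncdfCF::open_ncdf(fn)
  t <- ds[["time"]]$time()
  pr <- ds[["pr"]]$data()$array() * 86400
  baseline <- t$factor(era = 1991:2020)
  pr_base <- apply(pr, 1:2, tapply, baseline, mean)
  future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080))
  pr_future <- lapply(future, function(f) apply(pr, 1:2, tapply, f, mean))
  mapply(function(pr, f) (pr - pr_base) * t$factor_units(f), pr_future, future, SIMPLIFY = FALSE)
})
plan(sequential)     # release the workers
```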
Due to the large size of typical climate projection data files, it is common to have a data suite that is contained in multiple files. A case in point is the CORDEX data set which breaks up the experiment period of 2006 - 2100 into 19 files of 5 years each, with each file covering a single parameter (temperature, precipitation, etc) over an entire domain (such as Europe, South Asia, Central America and the Caribbean, etc). The CFtime package can streamline processing of such multi-file data suites as well.
Assuming that you have your CORDEX files in a directory on disk,
organized by domain and other properties such as the variable, GCM/RCM
combination, experiment, etc, the process of preparing the files for
processing could be encoded in a function as below. The argument
fn
is a list of file names to process, and var
is the variable contained in the files. (There are no checks on argument
sanity here, which should really be included. This function only makes
sense for a single [domain, GCM/RCM, experiment, variable] combination.
Also be aware of data size, CORDEX files are huge and stitching all
domain data together will easily exhaust available memory and it may
thus lead to very large swap files and very poor performance - use the
ncdfCF::CFVariable$subset()
method to read spatial or
temporal chunks of data to avoid such problems.)
library(ncdfCF)
library(abind)
prepare_CORDEX <- function(fn, var, aoi) {
data <- vector("list", length(fn))
for (i in 1:length(fn)) {
ds <- ncdfCF::open_ncdf(fn[i])
if (i == 1) {
# Get a CFTime instance from the first file
t <- ds[["time"]]$time()
} else {
# Add offsets from the file and add to the CFTime instance
t <- t + ds[["time"]]$time()$offsets
}
# Put the subsetted data array in the list
data[[i]] <- ds[[var]]$subset(aoi = aoi)$array()
}
# Create a list for output with the CFTime instance and
# the data bound in a single 3-dimensional array
list(CFTime = t, data = abind(data, along = 3))
}
Calling this function like
prepare_CORDEX(list.files(path = "~/CC/CORDEX/CAM", pattern = "^pr.*\\.nc$", full.names = TRUE), "pr", ncdfCF::aoi(0, 20, 30, 50))
will yield a list of netCDF files with precipitation data, with the
resulting CFTime
instance describing the full temporal
extent covered by the data files, as well as the data bound on the
temporal dimension, ready for further processing.
When working like this it is imperative that the offsets and the data
arrays are added to their final structures in exactly the same
order. It is not necessary that the offsets (and the data)
themselves are in order, but the correspondence between offsets and data
needs to be maintained. (list.files()
produces a list in
alphabetical order by default, which for most climate projection files
produces offsets in chronological order.)
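A short sanity check along those lines, reusing the `prepare_CORDEX()` sketch above and the hypothetical CORDEX directory from the calling example:

```r
# Sketch: verify that offsets are chronological and that offsets and data slices still correspond
result <- prepare_CORDEX(list.files("~/CC/CORDEX/CAM", pattern = "^pr.*\\.nc$", full.names = TRUE),
                         "pr", ncdfCF::aoi(0, 20, 30, 50))
stopifnot(!is.unsorted(result$CFTime$offsets))                    # time steps increase monotonically
stopifnot(dim(result$data)[3] == length(result$CFTime$offsets))   # one data slice per time step
```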
The results presented contain modified data from Copernicus Climate Change Service information, 2023-2025. Neither the European Commission nor ECMWF is responsible for any use that may be made of the Copernicus information or data it contains.
We acknowledge the World Climate Research Programme, which, through its Working Group on Coupled Modelling, coordinated and promoted CMIP6. We thank the climate modeling groups for producing and making available their model output, the Earth System Grid Federation (ESGF) for archiving the data and providing access, and the multiple funding agencies who support CMIP6 and ESGF.
The two data sets used as examples in this vignette carry the following license statements:
Around the world, many climate change models are being developed (100+) under the umbrella of the World Climate Research Programme to assess the rate of climate change. Published data is generally publicly available to download for research and other (non-commercial) purposes through partner organizations in the Earth Systems Grid Federation.
The data are all formatted to comply with the CF Metadata Conventions, a set of standards to support standardization among research groups and published data sets. These conventions greatly facilitate use and analysis of the climate projections because standard processing work flows (should) work across the various data sets.
On the flip side, the CF Metadata Conventions needs to cater to a wide range of modeling requirements and that means that some of the areas covered by the standards are more complex than might be assumed. One of those areas is the temporal dimension of the data sets. The CF Metadata Conventions supports no less than 11 different calendar definitions, that, upon analysis, fall into 8 distinct calendars (from the perspective of computation of climate projections):
* `standard` (or `gregorian`): The Gregorian calendar that is in common use in many countries around the world, adopted by edict of Pope Gregory XIII in 1582 and in effect from 15 October of that year. The earliest valid time in this calendar is 0001-01-01 00:00:00 (1 January of year 1) as year 0 does not exist and the CF Metadata Conventions require the year to be positive, but noting that a Julian calendar is used in periods before the Gregorian calendar was introduced.
* `proleptic_gregorian`: This is the Gregorian calendar with validity extended to periods prior to 1582-10-15, including a year 0 and negative years. This calendar is being used in most OSes and is what is being used by R.
* `tai`: International Atomic Time, a global standard for linear time: it counts seconds since its start at 1958-01-01 00:00:00. For presentation it uses the Gregorian calendar. Timestamps prior to its start are not allowed.
* `utc`: Coordinated Universal Time, the standard for civil timekeeping all over the world. It is based on International Atomic Time but it uses occasional leap seconds to remain synchronous with Earth's rotation; at the end of 2024 it is 37 seconds behind `tai`. It uses the Gregorian calendar with a start at 1972-01-01 00:00:00; earlier timestamps are not allowed. Future timestamps are also not allowed because the insertion of leap seconds is unpredictable. Most computer clocks use UTC.
* `julian`: Adopted in the year 45 BCE, every fourth year is a leap year. Originally, the Julian calendar did not have a monotonically increasing year assigned to it and there are indeed several Julian calendars in use around the world today with different years assigned to them. Common interpretation is currently that the year is the same as that of the Gregorian calendar. The Julian calendar is currently 13 days behind the Gregorian calendar. As with the standard calendar, the earliest valid time in this calendar is 0001-01-01 00:00:00.
* `365_day` or `noleap`: "Model time" in which no years have a leap day. Negative years are allowed and year 0 exists.
* `366_day` or `all_leap`: "Model time" in which all years have a leap day. Negative years are allowed and year 0 exists.
* `360_day`: "Model time" in which every year has 12 months of 30 days each. Negative years are allowed and year 0 exists.
Metadata Conventions to reduce computational complexities of working
with dates. None of the 8 calendars are compliant with the standard
POSIXt
date/time facilities in R
and using
standard date/time functions would quickly lead to problems. See the
section on "CFtime and POSIXt", below, for a detailed description of the
discrepancies between the CF calendars and POSIXt.
In the below code snippet, the date of 1949-12-01
is the
origin from which other dates are calculated. When adding
43,289 days to this origin for a data set that uses the
360_day
calendar, that should yield a date some 120 years
after the origin:
# POSIXt calculations on a standard calendar - INCORRECT
as.Date("1949-12-01") + 43289
#> [1] "2068-06-08"
# CFtime calculation on a "360_day" calendar - CORRECT
# See below examples for details on the two functions
as_timestamp(CFtime("days since 1949-12-01", "360_day", 43289))
#> [1] "2070-02-30"
Using standard POSIXt
calculations gives a result that
is about 21 months off from the correct date - obviously an undesirable
situation. This example is far from artificial: 1949-12-01
is the origin for all CORDEX data, covering the period 1950 - 2005 for
historical experiments and the period 2006 - 2100 for RCP experiments
(with some deviation between data sets), and several models used in the
CORDEX set use the 360_day
calendar. The
365_day
or noleap
calendar deviates by about 1
day every 4 years (disregarding centurial years), or about 24 days in a
century. The 366_day
or all_leap
calendar
deviates by about 3 days every 4 years, or about 76 days in a
century.
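A similar comparison shows the drift of the 365_day calendar; a minimal sketch with the functions already introduced (the commented dates are the expected results):

```r
# Sketch: 36,500 daily offsets from the same origin end up in different years
days <- 100 * 365
as_timestamp(CFtime("days since 2000-01-01", "standard", days))   # expected: "2099-12-07"
as_timestamp(CFtime("days since 2000-01-01", "365_day", days))    # expected: "2100-01-01"
```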
The CFtime
package deals with the complexity of the
different calendars allowed by the CF Metadata Conventions. It properly
formats dates and times (even oddball dates like
2070-02-30
) and it can generate calendar-aware factors for
further processing of the data.
The character of CF time series - a number of numerical offsets from
a base date - implies that there should only be a single time zone
associated with the time series, and then only for the
standard
and proleptic_gregorian
calendars.
For the other calendars a time zone can be set but it will have no
effect. Daylight savings time information is never considered by
CFtime
so the user should take care to avoid entering times
with DST.
The time zone offset from UTC is stored in the CFTime
instance and can be retrieved with the timezone()
function.
If a vector of character timestamps with time zone information is parsed
with the parse_timestamps()
function and the time zones are
found to be different from the CFTime
time zone, a warning
message is generated but the timestamp is interpreted as being in the
CFTime
time zone. No correction of timestamp to
CFTime
time zone is performed.
The concept of time zones does not apply to the utc
and
tai
calendars as they represent universal time, i.e. the
indicated time is valid all over the globe. Timestamps passed to these
calendars should not have a time zone indicated, but if there are,
anything other than a 0 offset will generate an error.
Data sets that are compliant with the CF Metadata Conventions always
include an origin, a specific point in time in reference to a
specified calendar, from which other points in time are
calculated by adding a specified offset of a certain
unit. This approach is encapsulated in the CFtime
package by the R6 class CFTime
.
# Create a CFTime object from a definition string, a calendar and some offsets
(t <- CFtime("days since 1949-12-01", "360_day", 19830:90029))
#> CF calendar:
#> Origin : 1949-12-01 00:00:00
#> Units : days
#> Type : 360_day
#> Time series:
#> Elements: [2005-01-01 .. 2199-12-30] (average of 1.000000 days between 70200 elements)
#> Bounds : not set
The CFtime()
function takes a description (which is
actually a unit - "days" - in reference to an origin - "1949-12-01"), a
calendar description, and a vector of offsets from that origin.
Once a CFTime
instance is created its origin and calendar
cannot be changed anymore. Offsets may be added.
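A minimal sketch of appending offsets with the `+` operator, which also appears in the multi-file example elsewhere in this documentation:

```r
# Sketch: append a second year of daily offsets to an existing CFTime instance
t <- CFtime("days since 1949-12-01", "360_day", 0:359)
t <- t + 360:719        # offsets are appended; origin and calendar are unchanged
length(t$offsets)       # 720
```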
In practice, these parameters will be taken from the data set of interest. CF Metadata Conventions require data sets to be in the netCDF format, with all metadata describing the data set included in a single file, including the mandatory "Conventions" global attribute which should have a string identifying the version of the CF Metadata Conventions that this file adheres to (among possible others). Not surprisingly, all the pieces of interest are contained in the "time" dimension of the file. The process then becomes as follows, for a CMIP6 file of daily precipitation:
# Opening a data file that is included with the package.
# Usually you would `list.files()` on a directory of your choice.
fn <- list.files(path = system.file("extdata", package = "CFtime"), full.names = TRUE)[1]
(ds <- ncdfCF::open_ncdf(fn))
#> <Dataset> pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701
#> Resource : /private/var/folders/gs/s0mmlczn4l7bjbmwfrrhjlt80000gn/T/RtmpzkXN7W/Rinst111b86a3f8a11/CFtime/extdata/pr_day_GFDL-ESM4_ssp245_r1i1p1f1_gr1_20150101-20991231_v20180701.nc
#> Format : netcdf4
#> Type : generic netCDF data
#> Conventions: CF-1.7 CMIP-6.0 UGRID-1.0
#> Keep open : FALSE
#> Has groups : FALSE
#>
#> Variable:
#> name long_name units data_type axes
#> pr Precipitation kg m-2 s-1 NC_FLOAT lon, lat, time
#>
#> Axes:
#> id axis name long_name length values unit
#> 0 bnds vertex number 2 [1 ... 2]
#> 1 Y lat latitude 1 [-37.5] degrees_north
#> 2 T time 31025 [2015-01-01 12:00... days since 1850-0...
#> 3 X lon longitude 1 [175.625] degrees_east
#>
#> Attributes:
#> id name type length
#> 0 external_variables NC_CHAR 9
#> 1 history NC_CHAR 124
#> 2 table_id NC_CHAR 3
#> 3 activity_id NC_CHAR 11
#> 4 branch_method NC_CHAR 8
#> 5 branch_time_in_child NC_DOUBLE 1
#> 6 branch_time_in_parent NC_DOUBLE 1
#> 7 comment NC_CHAR 10
#> 8 contact NC_CHAR 32
#> 9 Conventions NC_CHAR 25
#> 10 creation_date NC_CHAR 20
#> 11 data_specs_version NC_CHAR 8
#> 12 experiment NC_CHAR 30
#> 13 experiment_id NC_CHAR 6
#> 14 forcing_index NC_INT 1
#> 15 frequency NC_CHAR 3
#> 16 further_info_url NC_CHAR 77
#> 17 grid NC_CHAR 94
#> 18 grid_label NC_CHAR 3
#> 19 initialization_index NC_INT 1
#> 20 institution NC_CHAR 112
#> 21 institution_id NC_CHAR 9
#> 22 license NC_CHAR 805
#> 23 mip_era NC_CHAR 5
#> 24 nominal_resolution NC_CHAR 6
#> 25 parent_activity_id NC_CHAR 4
#> 26 parent_experiment_id NC_CHAR 10
#> 27 parent_mip_era NC_CHAR 5
#> 28 parent_source_id NC_CHAR 9
#> 29 parent_time_units NC_CHAR 19
#> 30 parent_variant_label NC_CHAR 8
#> 31 physics_index NC_INT 1
#> 32 product NC_CHAR 12
#> 33 realization_index NC_INT 1
#> 34 realm NC_CHAR 5
#> 35 source NC_CHAR 560
#> 36 source_id NC_CHAR 9
#> 37 source_type NC_CHAR 18
#> 38 sub_experiment NC_CHAR 4
#> 39 sub_experiment_id NC_CHAR 4
#> 40 title NC_CHAR 82
#> 41 tracking_id NC_CHAR 49
#> 42 variable_id NC_CHAR 2
#> 43 variant_info NC_CHAR 3
#> 44 references NC_CHAR 30
#> 45 variant_label NC_CHAR 8
#> value
#> areacella
#> File was processed by fremetar (GFDL analog of ...
#> day
#> ScenarioMIP
#> standard
#> 60225
#> 60225
#> <null ref>
#> gfdl.climate.model.info@noaa.gov
#> CF-1.7 CMIP-6.0 UGRID-1.0
#> 2019-06-18T05:29:00Z
#> 01.00.27
#> update of RCP4.5 based on SSP2
#> ssp245
#> 1
#> day
#> https://furtherinfo.es-doc.org/CMIP6.NOAA-GFDL....
#> atmos data regridded from Cubed-sphere (c96) to...
#> gr1
#> 1
#> National Oceanic and Atmospheric Administration...
#> NOAA-GFDL
#> CMIP6 model data produced by NOAA-GFDL is licen...
#> CMIP6
#> 100 km
#> CMIP
#> historical
#> CMIP6
#> GFDL-ESM4
#> days since 1850-1-1
#> r1i1p1f1
#> 1
#> model-output
#> 1
#> atmos
#> GFDL-ESM4 (2018):\natmos: GFDL-AM4.1 (Cubed-sphe...
#> GFDL-ESM4
#> AOGCM AER CHEM BGC
#> none
#> none
#> NOAA GFDL GFDL-ESM4 model output prepared for C...
#> hdl:21.14100/48767401-8960-4864-8738-e64640bef71d
#> pr
#> N/A
#> see further_info_url attribute
#> r1i1p1f1
# "Conventions" global attribute must have a string like "CF-1.*" for this package to work reliably
# Look at the "time" axis
(time <- ds[["time"]])
#> <Time axis> [2] time
#> Length : 31025
#> Axis : T
#> Calendar : noleap
#> Range : 2015-01-01 12:00:00 ... 2099-12-31 12:00:00 (days)
#> Bounds : 2015-01-01 ... 2100-01-01
#>
#> Attributes:
#> id name type length value
#> 0 long_name NC_CHAR 4 time
#> 1 axis NC_CHAR 1 T
#> 2 calendar_type NC_CHAR 6 noleap
#> 3 bounds NC_CHAR 9 time_bnds
#> 4 standard_name NC_CHAR 4 time
#> 5 description NC_CHAR 13 Temporal mean
#> 6 units NC_CHAR 21 days since 1850-01-01
#> 7 calendar NC_CHAR 6 noleap
# Get the CFTime instance from the "time" axis
(t <- time$time())
#> CF calendar:
#> Origin : 1850-01-01 00:00:00
#> Units : days
#> Type : noleap
#> Time series:
#> Elements: [2015-01-01 12:00:00 .. 2099-12-31 12:00:00] (average of 1.000000 days between 31025 elements)
#> Bounds : regular and consecutive
You can see from the global attribute “Conventions” that the file adheres to the CF Metadata Conventions, among others. According to the CF conventions, units and calendar are required attributes of the “time” dimension in the netCDF file.
The above example (and others in this vignette) uses the ncdfCF package. If you are using the RNetCDF or ncdf4 package, checking for CF conventions and then creating a CFTime instance goes like this:
library(RNetCDF)
nc <- open.nc(fn)
# Check the global "Conventions" attribute for CF compliance
att.get.nc(nc, -1, "Conventions")
# Build the CFTime instance from the "time" variable's attributes and values
t <- CFtime(att.get.nc(nc, "time", "units"),
            att.get.nc(nc, "time", "calendar"),
            var.get.nc(nc, "time"))
library(ncdf4)
nc <- nc_open(fn)
# Check the global "Conventions" attribute for CF compliance
ncatt_get(nc, 0, "Conventions")
# Build the CFTime instance from the "time" dimension
t <- CFtime(nc$dim$time$units,
            nc$dim$time$calendar,
            nc$dim$time$vals)
The character representations of the time series can be easily generated:
dates <- t$as_timestamp(format = "date")
dates[1:10]
#> [1] "2015-01-01" "2015-01-02" "2015-01-03" "2015-01-04" "2015-01-05"
#> [6] "2015-01-06" "2015-01-07" "2015-01-08" "2015-01-09" "2015-01-10"
…as well as the range of the time series:
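(A minimal sketch for this step, assuming that the base-R range() generic is implemented for CFTime objects; consult the package reference for the exact interface.)
# Sketch: the extremes of the time series as timestamps
range(t)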
Note that in this latter case, if any of the timestamps in the time series have a time other than 00:00:00, then the time of the extremes of the time series is also displayed. This is a common occurrence because the CF Metadata Conventions prescribe that the middle of the time period (month, day, etc.) is recorded, which for months with 31 days would be something like 2005-01-16T12:00:00.
When working with high resolution climate projection data, typically at a “day†resolution, one of the processing steps would be to aggregate the data to some lower resolution such as a dekad (10-day period), a month or a meteorological season, and then compute a derivative value such as the dekadal sum of precipitation, monthly minimum/maximum daily temperature, or seasonal average daily short-wave irradiance.
It is also possible to create factors for multiple “eras” in one go. This greatly reduces programming effort if you want to calculate anomalies over multiple future periods. A complete example is provided in the vignette “Processing climate projection data”.
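As a hedged sketch (assuming that the era argument of the factor() method accepts a named list of year vectors, one per era; check the package documentation for the definitive interface):
# Sketch only: monthly factors over three assumed 20-year eras within the time series
f_eras <- t$factor(period = "month", era = list(near = 2021:2040,
                                                mid  = 2041:2060,
                                                far  = 2061:2080))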
It is easy to generate the factors that you need once you have a CFTime instance prepared:
# Create a dekad factor for the whole `t` time series that was created above
f_k <- t$factor("dekad")
str(f_k)
#> Factor w/ 3060 levels "2015D01","2015D02",..: 1 1 1 1 1 1 1 1 1 1 ...
#> - attr(*, "era")= int -1
#> - attr(*, "period")= chr "dekad"
#> - attr(*, "CFTime")=Classes 'CFTime', 'R6' 2015-01-06 00:00:00 2015-01-16 00:00:00 2015-01-26 12:00:00 2015-02-06 00:00:00 2015-02-16 00:00:00 2015-02-25 00:00:00 2015-03-06 00:00:00 2015-03-16 00:00:00 2015-03-26 12:00:00 2015-04-06 00:00:00 2015-04-16 00:00:00 2015-04-26 00:00:00 2015-05-06 00:00:00 2015-05-16 00:00:00 2015-05-26 12:00:00 2015-06-06 00:00:00 2015-06-16 00:00:00 2015-06-26 00:00:00 2015-07-06 00:00:00 2015-07-16 00:00:00 2015-07-26 12:00:00 2015-08-06 00:00:00 2015-08-16 00:00:00 2015-08-26 12:00:00 2015-09-06 00:00:00 2015-09-16 00:00:00 2015-09-26 00:00:00 2015-10-06 00:00:00 2015-10-16 00:00:00 2015-10-26 12:00:00 2015-11-06 00:00:00 2015-11-16 00:00:00 2015-11-26 00:00:00 2015-12-06 00:00:00 2015-12-16 00:00:00 2015-12-26 12:00:00 2016-01-06 00:00:00 2016-01-16 00:00:00 2016-01-26 12:00:00 2016-02-06 00:00:00 2016-02-16 00:00:00 2016-02-25 00:00:00 2016-03-06 00:00:00 2016-03-16 00:00:00 2016-03-26 12:00:00 2016-04-06 00:00:00 2016-04-16 00:00:00 2016-04-26 00:00:00 2016-05-06 00:00:00 2016-05-16 00:00:00 2016-05-26 12:00:00 2016-06-06 00:00:00 2016-06-16 00:00:00 2016-06-26 00:00:00 2016-07-06 00:00:00 2016-07-16 00:00:00 2016-07-26 12:00:00 2016-08-06 00:00:00 2016-08-16 00:00:00 2016-08-26 12:00:00 2016-09-06 00:00:00 2016-09-16 00:00:00 2016-09-26 00:00:00 2016-10-06 00:00:00 2016-10-16 00:00:00 2016-10-26 12:00:00 2016-11-06 00:00:00 2016-11-16 00:00:00 2016-11-26 00:00:00 2016-12-06 00:00:00 2016-12-16 00:00:00 2016-12-26 12:00:00 2017-01-06 00:00:00 2017-01-16 00:00:00 2017-01-26 12:00:00 2017-02-06 00:00:00 2017-02-16 00:00:00 2017-02-25 00:00:00 2017-03-06 00:00:00 2017-03-16 00:00:00 2017-03-26 12:00:00 2017-04-06 00:00:00 2017-04-16 00:00:00 2017-04-26 00:00:00 2017-05-06 00:00:00 2017-05-16 00:00:00 2017-05-26 12:00:00 2017-06-06 00:00:00 2017-06-16 00:00:00 2017-06-26 00:00:00 2017-07-06 00:00:00 2017-07-16 00:00:00 2017-07-26 12:00:00 2017-08-06 00:00:00 2017-08-16 00:00:00 2017-08-26 12:00:00 2017-09-06 00:00:00 2017-09-16 00:00:00 2017-09-26 00:00:00 2017-10-06 00:00:00 2017-10-16 00:00:00 2017-10-26 12:00:00 2017-11-06 00:00:00 2017-11-16 00:00:00 2017-11-26 00:00:00 2017-12-06 00:00:00 2017-12-16 00:00:00 2017-12-26 12:00:00 2018-01-06 00:00:00 2018-01-16 00:00:00 2018-01-26 12:00:00 2018-02-06 00:00:00 2018-02-16 00:00:00 2018-02-25 00:00:00 2018-03-06 00:00:00 2018-03-16 00:00:00 2018-03-26 12:00:00 2018-04-06 00:00:00 2018-04-16 00:00:00 2018-04-26 00:00:00 2018-05-06 00:00:00 2018-05-16 00:00:00 2018-05-26 12:00:00 2018-06-06 00:00:00 2018-06-16 00:00:00 2018-06-26 00:00:00 2018-07-06 00:00:00 2018-07-16 00:00:00 2018-07-26 12:00:00 2018-08-06 00:00:00 2018-08-16 00:00:00 2018-08-26 12:00:00 2018-09-06 00:00:00 2018-09-16 00:00:00 2018-09-26 00:00:00 2018-10-06 00:00:00 2018-10-16 00:00:00 2018-10-26 12:00:00 2018-11-06 00:00:00 2018-11-16 00:00:00 2018-11-26 00:00:00 2018-12-06 00:00:00 2018-12-16 00:00:00 2018-12-26 12:00:00 2019-01-06 00:00:00 2019-01-16 00:00:00 2019-01-26 12:00:00 2019-02-06 00:00:00 2019-02-16 00:00:00 2019-02-25 00:00:00 2019-03-06 00:00:00 2019-03-16 00:00:00 2019-03-26 12:00:00 2019-04-06 00:00:00 2019-04-16 00:00:00 2019-04-26 00:00:00 2019-05-06 00:00:00 2019-05-16 00:00:00 2019-05-26 12:00:00 2019-06-06 00:00:00 2019-06-16 00:00:00 2019-06-26 00:00:00 2019-07-06 00:00:00 2019-07-16 00:00:00 2019-07-26 12:00:00 2019-08-06 00:00:00 2019-08-16 00:00:00 2019-08-26 12:00:00 2019-09-06 00:00:00 2019-09-16 00:00:00 2019-09-26 00:00:00 2019-10-06 00:00:00 2019-10-16 00:00:00 2019-10-26 12:00:00 2019-11-06 00:00:00 
2019-11-16 00:00:00 2019-11-26 00:00:00 2019-12-06 00:00:00 2019-12-16 00:00:00 2019-12-26 12:00:00 2020-01-06 00:00:00 2020-01-16 00:00:00 2020-01-26 12:00:00 2020-02-06 00:00:00 2020-02-16 00:00:00 2020-02-25 00:00:00 2020-03-06 00:00:00 2020-03-16 00:00:00 2020-03-26 12:00:00 2020-04-06 00:00:00 2020-04-16 00:00:00 2020-04-26 00:00:00 2020-05-06 00:00:00 2020-05-16 00:00:00 2020-05-26 12:00:00 2020-06-06 00:00:00 2020-06-16 00:00:00 2020-06-26 00:00:00 2020-07-06 00:00:00 2020-07-16 00:00:00 2020-07-26 12:00:00 2020-08-06 00:00:00 2020-08-16 00:00:00 2020-08-26 12:00:00 2020-09-06 00:00:00 2020-09-16 00:00:00 2020-09-26 00:00:00 2020-10-06 00:00:00 2020-10-16 00:00:00 2020-10-26 12:00:00 2020-11-06 00:00:00 2020-11-16 00:00:00 2020-11-26 00:00:00 2020-12-06 00:00:00 2020-12-16 00:00:00 2020-12-26 12:00:00 2021-01-06 00:00:00 2021-01-16 00:00:00 2021-01-26 12:00:00 2021-02-06 00:00:00 2021-02-16 00:00:00 2021-02-25 00:00:00 2021-03-06 00:00:00 2021-03-16 00:00:00 2021-03-26 12:00:00 2021-04-06 00:00:00 2021-04-16 00:00:00 2021-04-26 00:00:00 2021-05-06 00:00:00 2021-05-16 00:00:00 2021-05-26 12:00:00 2021-06-06 00:00:00 2021-06-16 00:00:00 2021-06-26 00:00:00 2021-07-06 00:00:00 2021-07-16 00:00:00 2021-07-26 12:00:00 2021-08-06 00:00:00 2021-08-16 00:00:00 2021-08-26 12:00:00 2021-09-06 00:00:00 2021-09-16 00:00:00 2021-09-26 00:00:00 2021-10-06 00:00:00 2021-10-16 00:00:00 2021-10-26 12:00:00 2021-11-06 00:00:00 2021-11-16 00:00:00 2021-11-26 00:00:00 2021-12-06 00:00:00 2021-12-16 00:00:00 2021-12-26 12:00:00 2022-01-06 00:00:00 2022-01-16 00:00:00 2022-01-26 12:00:00 2022-02-06 00:00:00 2022-02-16 00:00:00 2022-02-25 00:00:00 2022-03-06 00:00:00 2022-03-16 00:00:00 2022-03-26 12:00:00 2022-04-06 00:00:00 2022-04-16 00:00:00 2022-04-26 00:00:00 2022-05-06 00:00:00 2022-05-16 00:00:00 2022-05-26 12:00:00 2022-06-06 00:00:00 2022-06-16 00:00:00 2022-06-26 00:00:00 2022-07-06 00:00:00 2022-07-16 00:00:00 2022-07-26 12:00:00 2022-08-06 00:00:00 2022-08-16 00:00:00 2022-08-26 12:00:00 2022-09-06 00:00:00 2022-09-16 00:00:00 2022-09-26 00:00:00 2022-10-06 00:00:00 2022-10-16 00:00:00 2022-10-26 12:00:00 2022-11-06 00:00:00 2022-11-16 00:00:00 2022-11-26 00:00:00 2022-12-06 00:00:00 2022-12-16 00:00:00 2022-12-26 12:00:00 2023-01-06 00:00:00 2023-01-16 00:00:00 2023-01-26 12:00:00 2023-02-06 00:00:00 2023-02-16 00:00:00 2023-02-25 00:00:00 2023-03-06 00:00:00 2023-03-16 00:00:00 2023-03-26 12:00:00 2023-04-06 00:00:00 2023-04-16 00:00:00 2023-04-26 00:00:00 2023-05-06 00:00:00 2023-05-16 00:00:00 2023-05-26 12:00:00 2023-06-06 00:00:00 2023-06-16 00:00:00 2023-06-26 00:00:00 2023-07-06 00:00:00 2023-07-16 00:00:00 2023-07-26 12:00:00 2023-08-06 00:00:00 2023-08-16 00:00:00 2023-08-26 12:00:00 2023-09-06 00:00:00 2023-09-16 00:00:00 2023-09-26 00:00:00 2023-10-06 00:00:00 2023-10-16 00:00:00 2023-10-26 12:00:00 2023-11-06 00:00:00 2023-11-16 00:00:00 2023-11-26 00:00:00 2023-12-06 00:00:00 2023-12-16 00:00:00 2023-12-26 12:00:00 2024-01-06 00:00:00 2024-01-16 00:00:00 2024-01-26 12:00:00 2024-02-06 00:00:00 2024-02-16 00:00:00 2024-02-25 00:00:00 2024-03-06 00:00:00 2024-03-16 00:00:00 2024-03-26 12:00:00 2024-04-06 00:00:00 2024-04-16 00:00:00 2024-04-26 00:00:00 2024-05-06 00:00:00 2024-05-16 00:00:00 2024-05-26 12:00:00 2024-06-06 00:00:00 2024-06-16 00:00:00 2024-06-26 00:00:00 2024-07-06 00:00:00 2024-07-16 00:00:00 2024-07-26 12:00:00 2024-08-06 00:00:00 2024-08-16 00:00:00 2024-08-26 12:00:00 2024-09-06 00:00:00 2024-09-16 00:00:00 2024-09-26 00:00:00 2024-10-06 00:00:00 2024-10-16 
00:00:00 2024-10-26 12:00:00 2024-11-06 00:00:00 2024-11-16 00:00:00 2024-11-26 00:00:00 2024-12-06 00:00:00 2024-12-16 00:00:00 2024-12-26 12:00:00 2025-01-06 00:00:00 2025-01-16 00:00:00 2025-01-26 12:00:00 2025-02-06 00:00:00 2025-02-16 00:00:00 2025-02-25 00:00:00 2025-03-06 00:00:00 2025-03-16 00:00:00 2025-03-26 12:00:00 2025-04-06 00:00:00 2025-04-16 00:00:00 2025-04-26 00:00:00 2025-05-06 00:00:00 2025-05-16 00:00:00 2025-05-26 12:00:00 2025-06-06 00:00:00 2025-06-16 00:00:00 2025-06-26 00:00:00 2025-07-06 00:00:00 2025-07-16 00:00:00 2025-07-26 12:00:00 2025-08-06 00:00:00 2025-08-16 00:00:00 2025-08-26 12:00:00 2025-09-06 00:00:00 2025-09-16 00:00:00 2025-09-26 00:00:00 2025-10-06 00:00:00 2025-10-16 00:00:00 2025-10-26 12:00:00 2025-11-06 00:00:00 2025-11-16 00:00:00 2025-11-26 00:00:00 2025-12-06 00:00:00 2025-12-16 00:00:00 2025-12-26 12:00:00 2026-01-06 00:00:00 2026-01-16 00:00:00 2026-01-26 12:00:00 2026-02-06 00:00:00 2026-02-16 00:00:00 2026-02-25 00:00:00 2026-03-06 00:00:00 2026-03-16 00:00:00 2026-03-26 12:00:00 2026-04-06 00:00:00 2026-04-16 00:00:00 2026-04-26 00:00:00 2026-05-06 00:00:00 2026-05-16 00:00:00 2026-05-26 12:00:00 2026-06-06 00:00:00 2026-06-16 00:00:00 2026-06-26 00:00:00 2026-07-06 00:00:00 2026-07-16 00:00:00 2026-07-26 12:00:00 2026-08-06 00:00:00 2026-08-16 00:00:00 2026-08-26 12:00:00 2026-09-06 00:00:00 2026-09-16 00:00:00 2026-09-26 00:00:00 2026-10-06 00:00:00 2026-10-16 00:00:00 2026-10-26 12:00:00 2026-11-06 00:00:00 2026-11-16 00:00:00 2026-11-26 00:00:00 2026-12-06 00:00:00 2026-12-16 00:00:00 2026-12-26 12:00:00 2027-01-06 00:00:00 2027-01-16 00:00:00 2027-01-26 12:00:00 2027-02-06 00:00:00 2027-02-16 00:00:00 2027-02-25 00:00:00 2027-03-06 00:00:00 2027-03-16 00:00:00 2027-03-26 12:00:00 2027-04-06 00:00:00 2027-04-16 00:00:00 2027-04-26 00:00:00 2027-05-06 00:00:00 2027-05-16 00:00:00 2027-05-26 12:00:00 2027-06-06 00:00:00 2027-06-16 00:00:00 2027-06-26 00:00:00 2027-07-06 00:00:00 2027-07-16 00:00:00 2027-07-26 12:00:00 2027-08-06 00:00:00 2027-08-16 00:00:00 2027-08-26 12:00:00 2027-09-06 00:00:00 2027-09-16 00:00:00 2027-09-26 00:00:00 2027-10-06 00:00:00 2027-10-16 00:00:00 2027-10-26 12:00:00 2027-11-06 00:00:00 2027-11-16 00:00:00 2027-11-26 00:00:00 2027-12-06 00:00:00 2027-12-16 00:00:00 2027-12-26 12:00:00 2028-01-06 00:00:00 2028-01-16 00:00:00 2028-01-26 12:00:00 2028-02-06 00:00:00 2028-02-16 00:00:00 2028-02-25 00:00:00 2028-03-06 00:00:00 2028-03-16 00:00:00 2028-03-26 12:00:00 2028-04-06 00:00:00 2028-04-16 00:00:00 2028-04-26 00:00:00 2028-05-06 00:00:00 2028-05-16 00:00:00 2028-05-26 12:00:00 2028-06-06 00:00:00 2028-06-16 00:00:00 2028-06-26 00:00:00 2028-07-06 00:00:00 2028-07-16 00:00:00 2028-07-26 12:00:00 2028-08-06 00:00:00 2028-08-16 00:00:00 2028-08-26 12:00:00 2028-09-06 00:00:00 2028-09-16 00:00:00 2028-09-26 00:00:00 2028-10-06 00:00:00 2028-10-16 00:00:00 2028-10-26 12:00:00 2028-11-06 00:00:00 2028-11-16 00:00:00 2028-11-26 00:00:00 2028-12-06 00:00:00 2028-12-16 00:00:00 2028-12-26 12:00:00 2029-01-06 00:00:00 2029-01-16 00:00:00 2029-01-26 12:00:00 2029-02-06 00:00:00 2029-02-16 00:00:00 2029-02-25 00:00:00 2029-03-06 00:00:00 2029-03-16 00:00:00 2029-03-26 12:00:00 2029-04-06 00:00:00 2029-04-16 00:00:00 2029-04-26 00:00:00 2029-05-06 00:00:00 2029-05-16 00:00:00 2029-05-26 12:00:00 2029-06-06 00:00:00 2029-06-16 00:00:00 2029-06-26 00:00:00 2029-07-06 00:00:00 2029-07-16 00:00:00 2029-07-26 12:00:00 2029-08-06 00:00:00 2029-08-16 00:00:00 2029-08-26 12:00:00 2029-09-06 00:00:00 2029-09-16 00:00:00 
2029-09-26 00:00:00 2029-10-06 00:00:00 2029-10-16 00:00:00 2029-10-26 12:00:00 2029-11-06 00:00:00 2029-11-16 00:00:00 2029-11-26 00:00:00 2029-12-06 00:00:00 2029-12-16 00:00:00 2029-12-26 12:00:00 2030-01-06 00:00:00 2030-01-16 00:00:00 2030-01-26 12:00:00 2030-02-06 00:00:00 2030-02-16 00:00:00 2030-02-25 00:00:00 2030-03-06 00:00:00 2030-03-16 00:00:00 2030-03-26 12:00:00 2030-04-06 00:00:00 2030-04-16 00:00:00 2030-04-26 00:00:00 2030-05-06 00:00:00 2030-05-16 00:00:00 2030-05-26 12:00:00 2030-06-06 00:00:00 2030-06-16 00:00:00 2030-06-26 00:00:00 2030-07-06 00:00:00 2030-07-16 00:00:00 2030-07-26 12:00:00 2030-08-06 00:00:00 2030-08-16 00:00:00 2030-08-26 12:00:00 2030-09-06 00:00:00 2030-09-16 00:00:00 2030-09-26 00:00:00 2030-10-06 00:00:00 2030-10-16 00:00:00 2030-10-26 12:00:00 2030-11-06 00:00:00 2030-11-16 00:00:00 2030-11-26 00:00:00 2030-12-06 00:00:00 2030-12-16 00:00:00 2030-12-26 12:00:00 2031-01-06 00:00:00 2031-01-16 00:00:00 2031-01-26 12:00:00 2031-02-06 00:00:00 2031-02-16 00:00:00 2031-02-25 00:00:00 2031-03-06 00:00:00 2031-03-16 00:00:00 2031-03-26 12:00:00 2031-04-06 00:00:00 2031-04-16 00:00:00 2031-04-26 00:00:00 2031-05-06 00:00:00 2031-05-16 00:00:00 2031-05-26 12:00:00 2031-06-06 00:00:00 2031-06-16 00:00:00 2031-06-26 00:00:00 2031-07-06 00:00:00 2031-07-16 00:00:00 2031-07-26 12:00:00 2031-08-06 00:00:00 2031-08-16 00:00:00 2031-08-26 12:00:00 2031-09-06 00:00:00 2031-09-16 00:00:00 2031-09-26 00:00:00 2031-10-06 00:00:00 2031-10-16 00:00:00 2031-10-26 12:00:00 2031-11-06 00:00:00 2031-11-16 00:00:00 2031-11-26 00:00:00 2031-12-06 00:00:00 2031-12-16 00:00:00 2031-12-26 12:00:00 2032-01-06 00:00:00 2032-01-16 00:00:00 2032-01-26 12:00:00 2032-02-06 00:00:00 2032-02-16 00:00:00 2032-02-25 00:00:00 2032-03-06 00:00:00 2032-03-16 00:00:00 2032-03-26 12:00:00 2032-04-06 00:00:00 2032-04-16 00:00:00 2032-04-26 00:00:00 2032-05-06 00:00:00 2032-05-16 00:00:00 2032-05-26 12:00:00 2032-06-06 00:00:00 2032-06-16 00:00:00 2032-06-26 00:00:00 2032-07-06 00:00:00 2032-07-16 00:00:00 2032-07-26 12:00:00 2032-08-06 00:00:00 2032-08-16 00:00:00 2032-08-26 12:00:00 2032-09-06 00:00:00 2032-09-16 00:00:00 2032-09-26 00:00:00 2032-10-06 00:00:00 2032-10-16 00:00:00 2032-10-26 12:00:00 2032-11-06 00:00:00 2032-11-16 00:00:00 2032-11-26 00:00:00 2032-12-06 00:00:00 2032-12-16 00:00:00 2032-12-26 12:00:00 2033-01-06 00:00:00 2033-01-16 00:00:00 2033-01-26 12:00:00 2033-02-06 00:00:00 2033-02-16 00:00:00 2033-02-25 00:00:00 2033-03-06 00:00:00 2033-03-16 00:00:00 2033-03-26 12:00:00 2033-04-06 00:00:00 2033-04-16 00:00:00 2033-04-26 00:00:00 2033-05-06 00:00:00 2033-05-16 00:00:00 2033-05-26 12:00:00 2033-06-06 00:00:00 2033-06-16 00:00:00 2033-06-26 00:00:00 2033-07-06 00:00:00 2033-07-16 00:00:00 2033-07-26 12:00:00 2033-08-06 00:00:00 2033-08-16 00:00:00 2033-08-26 12:00:00 2033-09-06 00:00:00 2033-09-16 00:00:00 2033-09-26 00:00:00 2033-10-06 00:00:00 2033-10-16 00:00:00 2033-10-26 12:00:00 2033-11-06 00:00:00 2033-11-16 00:00:00 2033-11-26 00:00:00 2033-12-06 00:00:00 2033-12-16 00:00:00 2033-12-26 12:00:00 2034-01-06 00:00:00 2034-01-16 00:00:00 2034-01-26 12:00:00 2034-02-06 00:00:00 2034-02-16 00:00:00 2034-02-25 00:00:00 2034-03-06 00:00:00 2034-03-16 00:00:00 2034-03-26 12:00:00 2034-04-06 00:00:00 2034-04-16 00:00:00 2034-04-26 00:00:00 2034-05-06 00:00:00 2034-05-16 00:00:00 2034-05-26 12:00:00 2034-06-06 00:00:00 2034-06-16 00:00:00 2034-06-26 00:00:00 2034-07-06 00:00:00 2034-07-16 00:00:00 2034-07-26 12:00:00 2034-08-06 00:00:00 2034-08-16 00:00:00 2034-08-26 
12:00:00 2034-09-06 00:00:00 2034-09-16 00:00:00 2034-09-26 00:00:00 2034-10-06 00:00:00 2034-10-16 00:00:00 2034-10-26 12:00:00 2034-11-06 00:00:00 2034-11-16 00:00:00 2034-11-26 00:00:00 2034-12-06 00:00:00 2034-12-16 00:00:00 2034-12-26 12:00:00 2035-01-06 00:00:00 2035-01-16 00:00:00 2035-01-26 12:00:00 2035-02-06 00:00:00 2035-02-16 00:00:00 2035-02-25 00:00:00 2035-03-06 00:00:00 2035-03-16 00:00:00 2035-03-26 12:00:00 2035-04-06 00:00:00 2035-04-16 00:00:00 2035-04-26 00:00:00 2035-05-06 00:00:00 2035-05-16 00:00:00 2035-05-26 12:00:00 2035-06-06 00:00:00 2035-06-16 00:00:00 2035-06-26 00:00:00 2035-07-06 00:00:00 2035-07-16 00:00:00 2035-07-26 12:00:00 2035-08-06 00:00:00 2035-08-16 00:00:00 2035-08-26 12:00:00 2035-09-06 00:00:00 2035-09-16 00:00:00 2035-09-26 00:00:00 2035-10-06 00:00:00 2035-10-16 00:00:00 2035-10-26 12:00:00 2035-11-06 00:00:00 2035-11-16 00:00:00 2035-11-26 00:00:00 2035-12-06 00:00:00 2035-12-16 00:00:00 2035-12-26 12:00:00 2036-01-06 00:00:00 2036-01-16 00:00:00 2036-01-26 12:00:00 2036-02-06 00:00:00 2036-02-16 00:00:00 2036-02-25 00:00:00 2036-03-06 00:00:00 2036-03-16 00:00:00 2036-03-26 12:00:00 2036-04-06 00:00:00 2036-04-16 00:00:00 2036-04-26 00:00:00 2036-05-06 00:00:00 2036-05-16 00:00:00 2036-05-26 12:00:00 2036-06-06 00:00:00 2036-06-16 00:00:00 2036-06-26 00:00:00 2036-07-06 00:00:00 2036-07-16 00:00:00 2036-07-26 12:00:00 2036-08-06 00:00:00 2036-08-16 00:00:00 2036-08-26 12:00:00 2036-09-06 00:00:00 2036-09-16 00:00:00 2036-09-26 00:00:00 2036-10-06 00:00:00 2036-10-16 00:00:00 2036-10-26 12:00:00 2036-11-06 00:00:00 2036-11-16 00:00:00 2036-11-26 00:00:00 2036-12-06 00:00:00 2036-12-16 00:00:00 2036-12-26 12:00:00 2037-01-06 00:00:00 2037-01-16 00:00:00 2037-01-26 12:00:00 2037-02-06 00:00:00 2037-02-16 00:00:00 2037-02-25 00:00:00 2037-03-06 00:00:00 2037-03-16 00:00:00 2037-03-26 12:00:00 2037-04-06 00:00:00 2037-04-16 00:00:00 2037-04-26 00:00:00 2037-05-06 00:00:00 2037-05-16 00:00:00 2037-05-26 12:00:00 2037-06-06 00:00:00 2037-06-16 00:00:00 2037-06-26 00:00:00 2037-07-06 00:00:00 2037-07-16 00:00:00 2037-07-26 12:00:00 2037-08-06 00:00:00 2037-08-16 00:00:00 2037-08-26 12:00:00 2037-09-06 00:00:00 2037-09-16 00:00:00 2037-09-26 00:00:00 2037-10-06 00:00:00 2037-10-16 00:00:00 2037-10-26 12:00:00 2037-11-06 00:00:00 2037-11-16 00:00:00 2037-11-26 00:00:00 2037-12-06 00:00:00 2037-12-16 00:00:00 2037-12-26 12:00:00 2038-01-06 00:00:00 2038-01-16 00:00:00 2038-01-26 12:00:00 2038-02-06 00:00:00 2038-02-16 00:00:00 2038-02-25 00:00:00 2038-03-06 00:00:00 2038-03-16 00:00:00 2038-03-26 12:00:00 2038-04-06 00:00:00 2038-04-16 00:00:00 2038-04-26 00:00:00 2038-05-06 00:00:00 2038-05-16 00:00:00 2038-05-26 12:00:00 2038-06-06 00:00:00 2038-06-16 00:00:00 2038-06-26 00:00:00 2038-07-06 00:00:00 2038-07-16 00:00:00 2038-07-26 12:00:00 2038-08-06 00:00:00 2038-08-16 00:00:00 2038-08-26 12:00:00 2038-09-06 00:00:00 2038-09-16 00:00:00 2038-09-26 00:00:00 2038-10-06 00:00:00 2038-10-16 00:00:00 2038-10-26 12:00:00 2038-11-06 00:00:00 2038-11-16 00:00:00 2038-11-26 00:00:00 2038-12-06 00:00:00 2038-12-16 00:00:00 2038-12-26 12:00:00 2039-01-06 00:00:00 2039-01-16 00:00:00 2039-01-26 12:00:00 2039-02-06 00:00:00 2039-02-16 00:00:00 2039-02-25 00:00:00 2039-03-06 00:00:00 2039-03-16 00:00:00 2039-03-26 12:00:00 2039-04-06 00:00:00 2039-04-16 00:00:00 2039-04-26 00:00:00 2039-05-06 00:00:00 2039-05-16 00:00:00 2039-05-26 12:00:00 2039-06-06 00:00:00 2039-06-16 00:00:00 2039-06-26 00:00:00 2039-07-06 00:00:00 2039-07-16 00:00:00 2039-07-26 12:00:00 
2039-08-06 00:00:00 2039-08-16 00:00:00 2039-08-26 12:00:00 2039-09-06 00:00:00 2039-09-16 00:00:00 2039-09-26 00:00:00 2039-10-06 00:00:00 2039-10-16 00:00:00 2039-10-26 12:00:00 2039-11-06 00:00:00 2039-11-16 00:00:00 2039-11-26 00:00:00 2039-12-06 00:00:00 2039-12-16 00:00:00 2039-12-26 12:00:00 2040-01-06 00:00:00 2040-01-16 00:00:00 2040-01-26 12:00:00 2040-02-06 00:00:00 2040-02-16 00:00:00 2040-02-25 00:00:00 2040-03-06 00:00:00 2040-03-16 00:00:00 2040-03-26 12:00:00 2040-04-06 00:00:00 2040-04-16 00:00:00 2040-04-26 00:00:00 2040-05-06 00:00:00 2040-05-16 00:00:00 2040-05-26 12:00:00 2040-06-06 00:00:00 2040-06-16 00:00:00 2040-06-26 00:00:00 2040-07-06 00:00:00 2040-07-16 00:00:00 2040-07-26 12:00:00 2040-08-06 00:00:00 2040-08-16 00:00:00 2040-08-26 12:00:00 2040-09-06 00:00:00 2040-09-16 00:00:00 2040-09-26 00:00:00 2040-10-06 00:00:00 2040-10-16 00:00:00 2040-10-26 12:00:00 2040-11-06 00:00:00 2040-11-16 00:00:00 2040-11-26 00:00:00 2040-12-06 00:00:00 2040-12-16 00:00:00 2040-12-26 12:00:00 2041-01-06 00:00:00 2041-01-16 00:00:00 2041-01-26 12:00:00 2041-02-06 00:00:00 2041-02-16 00:00:00 2041-02-25 00:00:00 2041-03-06 00:00:00 2041-03-16 00:00:00 2041-03-26 12:00:00 2041-04-06 00:00:00 2041-04-16 00:00:00 2041-04-26 00:00:00 2041-05-06 00:00:00 2041-05-16 00:00:00 2041-05-26 12:00:00 2041-06-06 00:00:00 2041-06-16 00:00:00 2041-06-26 00:00:00 2041-07-06 00:00:00 2041-07-16 00:00:00 2041-07-26 12:00:00 2041-08-06 00:00:00 2041-08-16 00:00:00 2041-08-26 12:00:00 2041-09-06 00:00:00 2041-09-16 00:00:00 2041-09-26 00:00:00 2041-10-06 00:00:00 2041-10-16 00:00:00 2041-10-26 12:00:00 2041-11-06 00:00:00 2041-11-16 00:00:00 2041-11-26 00:00:00 2041-12-06 00:00:00 2041-12-16 00:00:00 2041-12-26 12:00:00 2042-01-06 00:00:00 2042-01-16 00:00:00 2042-01-26 12:00:00 2042-02-06 00:00:00 2042-02-16 00:00:00 2042-02-25 00:00:00 2042-03-06 00:00:00 2042-03-16 00:00:00 2042-03-26 12:00:00 2042-04-06 00:00:00 2042-04-16 00:00:00 2042-04-26 00:00:00 2042-05-06 00:00:00 2042-05-16 00:00:00 2042-05-26 12:00:00 2042-06-06 00:00:00 2042-06-16 00:00:00 2042-06-26 00:00:00 2042-07-06 00:00:00 2042-07-16 00:00:00 2042-07-26 12:00:00 2042-08-06 00:00:00 2042-08-16 00:00:00 2042-08-26 12:00:00 2042-09-06 00:00:00 2042-09-16 00:00:00 2042-09-26 00:00:00 2042-10-06 00:00:00 2042-10-16 00:00:00 2042-10-26 12:00:00 2042-11-06 00:00:00 2042-11-16 00:00:00 2042-11-26 00:00:00 2042-12-06 00:00:00 2042-12-16 00:00:00 2042-12-26 12:00:00 2043-01-06 00:00:00 2043-01-16 00:00:00 2043-01-26 12:00:00 2043-02-06 00:00:00 2043-02-16 00:00:00 2043-02-25 00:00:00 2043-03-06 00:00:00 2043-03-16 00:00:00 2043-03-26 12:00:00 2043-04-06 00:00:00 2043-04-16 00:00:00 2043-04-26 00:00:00 2043-05-06 00:00:00 2043-05-16 00:00:00 2043-05-26 12:00:00 2043-06-06 00:00:00 2043-06-16 00:00:00 2043-06-26 00:00:00 2043-07-06 00:00:00 2043-07-16 00:00:00 2043-07-26 12:00:00 2043-08-06 00:00:00 2043-08-16 00:00:00 2043-08-26 12:00:00 2043-09-06 00:00:00 2043-09-16 00:00:00 2043-09-26 00:00:00 2043-10-06 00:00:00 2043-10-16 00:00:00 2043-10-26 12:00:00 2043-11-06 00:00:00 2043-11-16 00:00:00 2043-11-26 00:00:00 2043-12-06 00:00:00 2043-12-16 00:00:00 2043-12-26 12:00:00 2044-01-06 00:00:00 2044-01-16 00:00:00 2044-01-26 12:00:00 2044-02-06 00:00:00 2044-02-16 00:00:00 2044-02-25 00:00:00 2044-03-06 00:00:00 2044-03-16 00:00:00 2044-03-26 12:00:00 2044-04-06 00:00:00 2044-04-16 00:00:00 2044-04-26 00:00:00 2044-05-06 00:00:00 2044-05-16 00:00:00 2044-05-26 12:00:00 2044-06-06 00:00:00 2044-06-16 00:00:00 2044-06-26 00:00:00 2044-07-06 
00:00:00 2044-07-16 00:00:00 2044-07-26 12:00:00 2044-08-06 00:00:00 2044-08-16 00:00:00 2044-08-26 12:00:00 2044-09-06 00:00:00 2044-09-16 00:00:00 2044-09-26 00:00:00 2044-10-06 00:00:00 2044-10-16 00:00:00 2044-10-26 12:00:00 2044-11-06 00:00:00 2044-11-16 00:00:00 2044-11-26 00:00:00 2044-12-06 00:00:00 2044-12-16 00:00:00 2044-12-26 12:00:00 2045-01-06 00:00:00 2045-01-16 00:00:00 2045-01-26 12:00:00 2045-02-06 00:00:00 2045-02-16 00:00:00 2045-02-25 00:00:00 2045-03-06 00:00:00 2045-03-16 00:00:00 2045-03-26 12:00:00 2045-04-06 00:00:00 2045-04-16 00:00:00 2045-04-26 00:00:00 2045-05-06 00:00:00 2045-05-16 00:00:00 2045-05-26 12:00:00 2045-06-06 00:00:00 2045-06-16 00:00:00 2045-06-26 00:00:00 2045-07-06 00:00:00 2045-07-16 00:00:00 2045-07-26 12:00:00 2045-08-06 00:00:00 2045-08-16 00:00:00 2045-08-26 12:00:00 2045-09-06 00:00:00 2045-09-16 00:00:00 2045-09-26 00:00:00 2045-10-06 00:00:00 2045-10-16 00:00:00 2045-10-26 12:00:00 2045-11-06 00:00:00 2045-11-16 00:00:00 2045-11-26 00:00:00 2045-12-06 00:00:00 2045-12-16 00:00:00 2045-12-26 12:00:00 2046-01-06 00:00:00 2046-01-16 00:00:00 2046-01-26 12:00:00 2046-02-06 00:00:00 2046-02-16 00:00:00 2046-02-25 00:00:00 2046-03-06 00:00:00 2046-03-16 00:00:00 2046-03-26 12:00:00 2046-04-06 00:00:00 2046-04-16 00:00:00 2046-04-26 00:00:00 2046-05-06 00:00:00 2046-05-16 00:00:00 2046-05-26 12:00:00 2046-06-06 00:00:00 2046-06-16 00:00:00 2046-06-26 00:00:00 2046-07-06 00:00:00 2046-07-16 00:00:00 2046-07-26 12:00:00 2046-08-06 00:00:00 2046-08-16 00:00:00 2046-08-26 12:00:00 2046-09-06 00:00:00 2046-09-16 00:00:00 2046-09-26 00:00:00 2046-10-06 00:00:00 2046-10-16 00:00:00 2046-10-26 12:00:00 2046-11-06 00:00:00 2046-11-16 00:00:00 2046-11-26 00:00:00 2046-12-06 00:00:00 2046-12-16 00:00:00 2046-12-26 12:00:00 2047-01-06 00:00:00 2047-01-16 00:00:00 2047-01-26 12:00:00 2047-02-06 00:00:00 2047-02-16 00:00:00 2047-02-25 00:00:00 2047-03-06 00:00:00 2047-03-16 00:00:00 2047-03-26 12:00:00 2047-04-06 00:00:00 2047-04-16 00:00:00 2047-04-26 00:00:00 2047-05-06 00:00:00 2047-05-16 00:00:00 2047-05-26 12:00:00 2047-06-06 00:00:00 2047-06-16 00:00:00 2047-06-26 00:00:00 2047-07-06 00:00:00 2047-07-16 00:00:00 2047-07-26 12:00:00 2047-08-06 00:00:00 2047-08-16 00:00:00 2047-08-26 12:00:00 2047-09-06 00:00:00 2047-09-16 00:00:00 2047-09-26 00:00:00 2047-10-06 00:00:00 2047-10-16 00:00:00 2047-10-26 12:00:00 2047-11-06 00:00:00 2047-11-16 00:00:00 2047-11-26 00:00:00 2047-12-06 00:00:00 2047-12-16 00:00:00 2047-12-26 12:00:00 2048-01-06 00:00:00 2048-01-16 00:00:00 2048-01-26 12:00:00 2048-02-06 00:00:00 2048-02-16 00:00:00 2048-02-25 00:00:00 2048-03-06 00:00:00 2048-03-16 00:00:00 2048-03-26 12:00:00 2048-04-06 00:00:00 2048-04-16 00:00:00 2048-04-26 00:00:00 2048-05-06 00:00:00 2048-05-16 00:00:00 2048-05-26 12:00:00 2048-06-06 00:00:00 2048-06-16 00:00:00 2048-06-26 00:00:00 2048-07-06 00:00:00 2048-07-16 00:00:00 2048-07-26 12:00:00 2048-08-06 00:00:00 2048-08-16 00:00:00 2048-08-26 12:00:00 2048-09-06 00:00:00 2048-09-16 00:00:00 2048-09-26 00:00:00 2048-10-06 00:00:00 2048-10-16 00:00:00 2048-10-26 12:00:00 2048-11-06 00:00:00 2048-11-16 00:00:00 2048-11-26 00:00:00 2048-12-06 00:00:00 2048-12-16 00:00:00 2048-12-26 12:00:00 2049-01-06 00:00:00 2049-01-16 00:00:00 2049-01-26 12:00:00 2049-02-06 00:00:00 2049-02-16 00:00:00 2049-02-25 00:00:00 2049-03-06 00:00:00 2049-03-16 00:00:00 2049-03-26 12:00:00 2049-04-06 00:00:00 2049-04-16 00:00:00 2049-04-26 00:00:00 2049-05-06 00:00:00 2049-05-16 00:00:00 2049-05-26 12:00:00 2049-06-06 00:00:00 
2049-06-16 00:00:00 2049-06-26 00:00:00 2049-07-06 00:00:00 2049-07-16 00:00:00 2049-07-26 12:00:00 2049-08-06 00:00:00 2049-08-16 00:00:00 2049-08-26 12:00:00 2049-09-06 00:00:00 2049-09-16 00:00:00 2049-09-26 00:00:00 2049-10-06 00:00:00 2049-10-16 00:00:00 2049-10-26 12:00:00 2049-11-06 00:00:00 2049-11-16 00:00:00 2049-11-26 00:00:00 2049-12-06 00:00:00 2049-12-16 00:00:00 2049-12-26 12:00:00 2050-01-06 00:00:00 2050-01-16 00:00:00 2050-01-26 12:00:00 2050-02-06 00:00:00 2050-02-16 00:00:00 2050-02-25 00:00:00 2050-03-06 00:00:00 2050-03-16 00:00:00 2050-03-26 12:00:00 2050-04-06 00:00:00 2050-04-16 00:00:00 2050-04-26 00:00:00 2050-05-06 00:00:00 2050-05-16 00:00:00 2050-05-26 12:00:00 2050-06-06 00:00:00 2050-06-16 00:00:00 2050-06-26 00:00:00 2050-07-06 00:00:00 2050-07-16 00:00:00 2050-07-26 12:00:00 2050-08-06 00:00:00 2050-08-16 00:00:00 2050-08-26 12:00:00 2050-09-06 00:00:00 2050-09-16 00:00:00 2050-09-26 00:00:00 2050-10-06 00:00:00 2050-10-16 00:00:00 2050-10-26 12:00:00 2050-11-06 00:00:00 2050-11-16 00:00:00 2050-11-26 00:00:00 2050-12-06 00:00:00 2050-12-16 00:00:00 2050-12-26 12:00:00 2051-01-06 00:00:00 2051-01-16 00:00:00 2051-01-26 12:00:00 2051-02-06 00:00:00 2051-02-16 00:00:00 2051-02-25 00:00:00 2051-03-06 00:00:00 2051-03-16 00:00:00 2051-03-26 12:00:00 2051-04-06 00:00:00 2051-04-16 00:00:00 2051-04-26 00:00:00 2051-05-06 00:00:00 2051-05-16 00:00:00 2051-05-26 12:00:00 2051-06-06 00:00:00 2051-06-16 00:00:00 2051-06-26 00:00:00 2051-07-06 00:00:00 2051-07-16 00:00:00 2051-07-26 12:00:00 2051-08-06 00:00:00 2051-08-16 00:00:00 2051-08-26 12:00:00 2051-09-06 00:00:00 2051-09-16 00:00:00 2051-09-26 00:00:00 2051-10-06 00:00:00 2051-10-16 00:00:00 2051-10-26 12:00:00 2051-11-06 00:00:00 2051-11-16 00:00:00 2051-11-26 00:00:00 2051-12-06 00:00:00 2051-12-16 00:00:00 2051-12-26 12:00:00 2052-01-06 00:00:00 2052-01-16 00:00:00 2052-01-26 12:00:00 2052-02-06 00:00:00 2052-02-16 00:00:00 2052-02-25 00:00:00 2052-03-06 00:00:00 2052-03-16 00:00:00 2052-03-26 12:00:00 2052-04-06 00:00:00 2052-04-16 00:00:00 2052-04-26 00:00:00 2052-05-06 00:00:00 2052-05-16 00:00:00 2052-05-26 12:00:00 2052-06-06 00:00:00 2052-06-16 00:00:00 2052-06-26 00:00:00 2052-07-06 00:00:00 2052-07-16 00:00:00 2052-07-26 12:00:00 2052-08-06 00:00:00 2052-08-16 00:00:00 2052-08-26 12:00:00 2052-09-06 00:00:00 2052-09-16 00:00:00 2052-09-26 00:00:00 2052-10-06 00:00:00 2052-10-16 00:00:00 2052-10-26 12:00:00 2052-11-06 00:00:00 2052-11-16 00:00:00 2052-11-26 00:00:00 2052-12-06 00:00:00 2052-12-16 00:00:00 2052-12-26 12:00:00 2053-01-06 00:00:00 2053-01-16 00:00:00 2053-01-26 12:00:00 2053-02-06 00:00:00 2053-02-16 00:00:00 2053-02-25 00:00:00 2053-03-06 00:00:00 2053-03-16 00:00:00 2053-03-26 12:00:00 2053-04-06 00:00:00 2053-04-16 00:00:00 2053-04-26 00:00:00 2053-05-06 00:00:00 2053-05-16 00:00:00 2053-05-26 12:00:00 2053-06-06 00:00:00 2053-06-16 00:00:00 2053-06-26 00:00:00 2053-07-06 00:00:00 2053-07-16 00:00:00 2053-07-26 12:00:00 2053-08-06 00:00:00 2053-08-16 00:00:00 2053-08-26 12:00:00 2053-09-06 00:00:00 2053-09-16 00:00:00 2053-09-26 00:00:00 2053-10-06 00:00:00 2053-10-16 00:00:00 2053-10-26 12:00:00 2053-11-06 00:00:00 2053-11-16 00:00:00 2053-11-26 00:00:00 2053-12-06 00:00:00 2053-12-16 00:00:00 2053-12-26 12:00:00 2054-01-06 00:00:00 2054-01-16 00:00:00 2054-01-26 12:00:00 2054-02-06 00:00:00 2054-02-16 00:00:00 2054-02-25 00:00:00 2054-03-06 00:00:00 2054-03-16 00:00:00 2054-03-26 12:00:00 2054-04-06 00:00:00 2054-04-16 00:00:00 2054-04-26 00:00:00 2054-05-06 00:00:00 2054-05-16 
00:00:00 2054-05-26 12:00:00 2054-06-06 00:00:00 2054-06-16 00:00:00 2054-06-26 00:00:00 2054-07-06 00:00:00 2054-07-16 00:00:00 2054-07-26 12:00:00 2054-08-06 00:00:00 2054-08-16 00:00:00 2054-08-26 12:00:00 2054-09-06 00:00:00 2054-09-16 00:00:00 2054-09-26 00:00:00 2054-10-06 00:00:00 2054-10-16 00:00:00 2054-10-26 12:00:00 2054-11-06 00:00:00 2054-11-16 00:00:00 2054-11-26 00:00:00 2054-12-06 00:00:00 2054-12-16 00:00:00 2054-12-26 12:00:00 2055-01-06 00:00:00 2055-01-16 00:00:00 2055-01-26 12:00:00 2055-02-06 00:00:00 2055-02-16 00:00:00 2055-02-25 00:00:00 2055-03-06 00:00:00 2055-03-16 00:00:00 2055-03-26 12:00:00 2055-04-06 00:00:00 2055-04-16 00:00:00 2055-04-26 00:00:00 2055-05-06 00:00:00 2055-05-16 00:00:00 2055-05-26 12:00:00 2055-06-06 00:00:00 2055-06-16 00:00:00 2055-06-26 00:00:00 2055-07-06 00:00:00 2055-07-16 00:00:00 2055-07-26 12:00:00 2055-08-06 00:00:00 2055-08-16 00:00:00 2055-08-26 12:00:00 2055-09-06 00:00:00 2055-09-16 00:00:00 2055-09-26 00:00:00 2055-10-06 00:00:00 2055-10-16 00:00:00 2055-10-26 12:00:00 2055-11-06 00:00:00 2055-11-16 00:00:00 2055-11-26 00:00:00 2055-12-06 00:00:00 2055-12-16 00:00:00 2055-12-26 12:00:00 2056-01-06 00:00:00 2056-01-16 00:00:00 2056-01-26 12:00:00 2056-02-06 00:00:00 2056-02-16 00:00:00 2056-02-25 00:00:00 2056-03-06 00:00:00 2056-03-16 00:00:00 2056-03-26 12:00:00 2056-04-06 00:00:00 2056-04-16 00:00:00 2056-04-26 00:00:00 2056-05-06 00:00:00 2056-05-16 00:00:00 2056-05-26 12:00:00 2056-06-06 00:00:00 2056-06-16 00:00:00 2056-06-26 00:00:00 2056-07-06 00:00:00 2056-07-16 00:00:00 2056-07-26 12:00:00 2056-08-06 00:00:00 2056-08-16 00:00:00 2056-08-26 12:00:00 2056-09-06 00:00:00 2056-09-16 00:00:00 2056-09-26 00:00:00 2056-10-06 00:00:00 2056-10-16 00:00:00 2056-10-26 12:00:00 2056-11-06 00:00:00 2056-11-16 00:00:00 2056-11-26 00:00:00 2056-12-06 00:00:00 2056-12-16 00:00:00 2056-12-26 12:00:00 2057-01-06 00:00:00 2057-01-16 00:00:00 2057-01-26 12:00:00 2057-02-06 00:00:00 2057-02-16 00:00:00 2057-02-25 00:00:00 2057-03-06 00:00:00 2057-03-16 00:00:00 2057-03-26 12:00:00 2057-04-06 00:00:00 2057-04-16 00:00:00 2057-04-26 00:00:00 2057-05-06 00:00:00 2057-05-16 00:00:00 2057-05-26 12:00:00 2057-06-06 00:00:00 2057-06-16 00:00:00 2057-06-26 00:00:00 2057-07-06 00:00:00 2057-07-16 00:00:00 2057-07-26 12:00:00 2057-08-06 00:00:00 2057-08-16 00:00:00 2057-08-26 12:00:00 2057-09-06 00:00:00 2057-09-16 00:00:00 2057-09-26 00:00:00 2057-10-06 00:00:00 2057-10-16 00:00:00 2057-10-26 12:00:00 2057-11-06 00:00:00 2057-11-16 00:00:00 2057-11-26 00:00:00 2057-12-06 00:00:00 2057-12-16 00:00:00 2057-12-26 12:00:00 2058-01-06 00:00:00 2058-01-16 00:00:00 2058-01-26 12:00:00 2058-02-06 00:00:00 2058-02-16 00:00:00 2058-02-25 00:00:00 2058-03-06 00:00:00 2058-03-16 00:00:00 2058-03-26 12:00:00 2058-04-06 00:00:00 2058-04-16 00:00:00 2058-04-26 00:00:00 2058-05-06 00:00:00 2058-05-16 00:00:00 2058-05-26 12:00:00 2058-06-06 00:00:00 2058-06-16 00:00:00 2058-06-26 00:00:00 2058-07-06 00:00:00 2058-07-16 00:00:00 2058-07-26 12:00:00 2058-08-06 00:00:00 2058-08-16 00:00:00 2058-08-26 12:00:00 2058-09-06 00:00:00 2058-09-16 00:00:00 2058-09-26 00:00:00 2058-10-06 00:00:00 2058-10-16 00:00:00 2058-10-26 12:00:00 2058-11-06 00:00:00 2058-11-16 00:00:00 2058-11-26 00:00:00 2058-12-06 00:00:00 2058-12-16 00:00:00 2058-12-26 12:00:00 2059-01-06 00:00:00 2059-01-16 00:00:00 2059-01-26 12:00:00 2059-02-06 00:00:00 2059-02-16 00:00:00 2059-02-25 00:00:00 2059-03-06 00:00:00 2059-03-16 00:00:00 2059-03-26 12:00:00 2059-04-06 00:00:00 2059-04-16 00:00:00 
2059-04-26 00:00:00 2059-05-06 00:00:00 2059-05-16 00:00:00 2059-05-26 12:00:00 2059-06-06 00:00:00 2059-06-16 00:00:00 2059-06-26 00:00:00 2059-07-06 00:00:00 2059-07-16 00:00:00 2059-07-26 12:00:00 2059-08-06 00:00:00 2059-08-16 00:00:00 2059-08-26 12:00:00 2059-09-06 00:00:00 2059-09-16 00:00:00 2059-09-26 00:00:00 2059-10-06 00:00:00 2059-10-16 00:00:00 2059-10-26 12:00:00 2059-11-06 00:00:00 2059-11-16 00:00:00 2059-11-26 00:00:00 2059-12-06 00:00:00 2059-12-16 00:00:00 2059-12-26 12:00:00 2060-01-06 00:00:00 2060-01-16 00:00:00 2060-01-26 12:00:00 2060-02-06 00:00:00 2060-02-16 00:00:00 2060-02-25 00:00:00 2060-03-06 00:00:00 2060-03-16 00:00:00 2060-03-26 12:00:00 2060-04-06 00:00:00 2060-04-16 00:00:00 2060-04-26 00:00:00 2060-05-06 00:00:00 2060-05-16 00:00:00 2060-05-26 12:00:00 2060-06-06 00:00:00 2060-06-16 00:00:00 2060-06-26 00:00:00 2060-07-06 00:00:00 2060-07-16 00:00:00 2060-07-26 12:00:00 2060-08-06 00:00:00 2060-08-16 00:00:00 2060-08-26 12:00:00 2060-09-06 00:00:00 2060-09-16 00:00:00 2060-09-26 00:00:00 2060-10-06 00:00:00 2060-10-16 00:00:00 2060-10-26 12:00:00 2060-11-06 00:00:00 2060-11-16 00:00:00 2060-11-26 00:00:00 2060-12-06 00:00:00 2060-12-16 00:00:00 2060-12-26 12:00:00 2061-01-06 00:00:00 2061-01-16 00:00:00 2061-01-26 12:00:00 2061-02-06 00:00:00 2061-02-16 00:00:00 2061-02-25 00:00:00 2061-03-06 00:00:00 2061-03-16 00:00:00 2061-03-26 12:00:00 2061-04-06 00:00:00 2061-04-16 00:00:00 2061-04-26 00:00:00 2061-05-06 00:00:00 2061-05-16 00:00:00 2061-05-26 12:00:00 2061-06-06 00:00:00 2061-06-16 00:00:00 2061-06-26 00:00:00 2061-07-06 00:00:00 2061-07-16 00:00:00 2061-07-26 12:00:00 2061-08-06 00:00:00 2061-08-16 00:00:00 2061-08-26 12:00:00 2061-09-06 00:00:00 2061-09-16 00:00:00 2061-09-26 00:00:00 2061-10-06 00:00:00 2061-10-16 00:00:00 2061-10-26 12:00:00 2061-11-06 00:00:00 2061-11-16 00:00:00 2061-11-26 00:00:00 2061-12-06 00:00:00 2061-12-16 00:00:00 2061-12-26 12:00:00 2062-01-06 00:00:00 2062-01-16 00:00:00 2062-01-26 12:00:00 2062-02-06 00:00:00 2062-02-16 00:00:00 2062-02-25 00:00:00 2062-03-06 00:00:00 2062-03-16 00:00:00 2062-03-26 12:00:00 2062-04-06 00:00:00 2062-04-16 00:00:00 2062-04-26 00:00:00 2062-05-06 00:00:00 2062-05-16 00:00:00 2062-05-26 12:00:00 2062-06-06 00:00:00 2062-06-16 00:00:00 2062-06-26 00:00:00 2062-07-06 00:00:00 2062-07-16 00:00:00 2062-07-26 12:00:00 2062-08-06 00:00:00 2062-08-16 00:00:00 2062-08-26 12:00:00 2062-09-06 00:00:00 2062-09-16 00:00:00 2062-09-26 00:00:00 2062-10-06 00:00:00 2062-10-16 00:00:00 2062-10-26 12:00:00 2062-11-06 00:00:00 2062-11-16 00:00:00 2062-11-26 00:00:00 2062-12-06 00:00:00 2062-12-16 00:00:00 2062-12-26 12:00:00 2063-01-06 00:00:00 2063-01-16 00:00:00 2063-01-26 12:00:00 2063-02-06 00:00:00 2063-02-16 00:00:00 2063-02-25 00:00:00 2063-03-06 00:00:00 2063-03-16 00:00:00 2063-03-26 12:00:00 2063-04-06 00:00:00 2063-04-16 00:00:00 2063-04-26 00:00:00 2063-05-06 00:00:00 2063-05-16 00:00:00 2063-05-26 12:00:00 2063-06-06 00:00:00 2063-06-16 00:00:00 2063-06-26 00:00:00 2063-07-06 00:00:00 2063-07-16 00:00:00 2063-07-26 12:00:00 2063-08-06 00:00:00 2063-08-16 00:00:00 2063-08-26 12:00:00 2063-09-06 00:00:00 2063-09-16 00:00:00 2063-09-26 00:00:00 2063-10-06 00:00:00 2063-10-16 00:00:00 2063-10-26 12:00:00 2063-11-06 00:00:00 2063-11-16 00:00:00 2063-11-26 00:00:00 2063-12-06 00:00:00 2063-12-16 00:00:00 2063-12-26 12:00:00 2064-01-06 00:00:00 2064-01-16 00:00:00 2064-01-26 12:00:00 2064-02-06 00:00:00 2064-02-16 00:00:00 2064-02-25 00:00:00 2064-03-06 00:00:00 2064-03-16 00:00:00 2064-03-26 
12:00:00 2064-04-06 00:00:00 2064-04-16 00:00:00 2064-04-26 00:00:00 2064-05-06 00:00:00 2064-05-16 00:00:00 2064-05-26 12:00:00 2064-06-06 00:00:00 2064-06-16 00:00:00 2064-06-26 00:00:00 2064-07-06 00:00:00 2064-07-16 00:00:00 2064-07-26 12:00:00 2064-08-06 00:00:00 2064-08-16 00:00:00 2064-08-26 12:00:00 2064-09-06 00:00:00 2064-09-16 00:00:00 2064-09-26 00:00:00 2064-10-06 00:00:00 2064-10-16 00:00:00 2064-10-26 12:00:00 2064-11-06 00:00:00 2064-11-16 00:00:00 2064-11-26 00:00:00 2064-12-06 00:00:00 2064-12-16 00:00:00 2064-12-26 12:00:00 2065-01-06 00:00:00 2065-01-16 00:00:00 2065-01-26 12:00:00 2065-02-06 00:00:00 2065-02-16 00:00:00 2065-02-25 00:00:00 2065-03-06 00:00:00 2065-03-16 00:00:00 2065-03-26 12:00:00 2065-04-06 00:00:00 2065-04-16 00:00:00 2065-04-26 00:00:00 2065-05-06 00:00:00 2065-05-16 00:00:00 2065-05-26 12:00:00 2065-06-06 00:00:00 2065-06-16 00:00:00 2065-06-26 00:00:00 2065-07-06 00:00:00 2065-07-16 00:00:00 2065-07-26 12:00:00 2065-08-06 00:00:00 2065-08-16 00:00:00 2065-08-26 12:00:00 2065-09-06 00:00:00 2065-09-16 00:00:00 2065-09-26 00:00:00 2065-10-06 00:00:00 2065-10-16 00:00:00 2065-10-26 12:00:00 2065-11-06 00:00:00 2065-11-16 00:00:00 2065-11-26 00:00:00 2065-12-06 00:00:00 2065-12-16 00:00:00 2065-12-26 12:00:00 2066-01-06 00:00:00 2066-01-16 00:00:00 2066-01-26 12:00:00 2066-02-06 00:00:00 2066-02-16 00:00:00 2066-02-25 00:00:00 2066-03-06 00:00:00 2066-03-16 00:00:00 2066-03-26 12:00:00 2066-04-06 00:00:00 2066-04-16 00:00:00 2066-04-26 00:00:00 2066-05-06 00:00:00 2066-05-16 00:00:00 2066-05-26 12:00:00 2066-06-06 00:00:00 2066-06-16 00:00:00 2066-06-26 00:00:00 2066-07-06 00:00:00 2066-07-16 00:00:00 2066-07-26 12:00:00 2066-08-06 00:00:00 2066-08-16 00:00:00 2066-08-26 12:00:00 2066-09-06 00:00:00 2066-09-16 00:00:00 2066-09-26 00:00:00 2066-10-06 00:00:00 2066-10-16 00:00:00 2066-10-26 12:00:00 2066-11-06 00:00:00 2066-11-16 00:00:00 2066-11-26 00:00:00 2066-12-06 00:00:00 2066-12-16 00:00:00 2066-12-26 12:00:00 2067-01-06 00:00:00 2067-01-16 00:00:00 2067-01-26 12:00:00 2067-02-06 00:00:00 2067-02-16 00:00:00 2067-02-25 00:00:00 2067-03-06 00:00:00 2067-03-16 00:00:00 2067-03-26 12:00:00 2067-04-06 00:00:00 2067-04-16 00:00:00 2067-04-26 00:00:00 2067-05-06 00:00:00 2067-05-16 00:00:00 2067-05-26 12:00:00 2067-06-06 00:00:00 2067-06-16 00:00:00 2067-06-26 00:00:00 2067-07-06 00:00:00 2067-07-16 00:00:00 2067-07-26 12:00:00 2067-08-06 00:00:00 2067-08-16 00:00:00 2067-08-26 12:00:00 2067-09-06 00:00:00 2067-09-16 00:00:00 2067-09-26 00:00:00 2067-10-06 00:00:00 2067-10-16 00:00:00 2067-10-26 12:00:00 2067-11-06 00:00:00 2067-11-16 00:00:00 2067-11-26 00:00:00 2067-12-06 00:00:00 2067-12-16 00:00:00 2067-12-26 12:00:00 2068-01-06 00:00:00 2068-01-16 00:00:00 2068-01-26 12:00:00 2068-02-06 00:00:00 2068-02-16 00:00:00 2068-02-25 00:00:00 2068-03-06 00:00:00 2068-03-16 00:00:00 2068-03-26 12:00:00 2068-04-06 00:00:00 2068-04-16 00:00:00 2068-04-26 00:00:00 2068-05-06 00:00:00 2068-05-16 00:00:00 2068-05-26 12:00:00 2068-06-06 00:00:00 2068-06-16 00:00:00 2068-06-26 00:00:00 2068-07-06 00:00:00 2068-07-16 00:00:00 2068-07-26 12:00:00 2068-08-06 00:00:00 2068-08-16 00:00:00 2068-08-26 12:00:00 2068-09-06 00:00:00 2068-09-16 00:00:00 2068-09-26 00:00:00 2068-10-06 00:00:00 2068-10-16 00:00:00 2068-10-26 12:00:00 2068-11-06 00:00:00 2068-11-16 00:00:00 2068-11-26 00:00:00 2068-12-06 00:00:00 2068-12-16 00:00:00 2068-12-26 12:00:00 2069-01-06 00:00:00 2069-01-16 00:00:00 2069-01-26 12:00:00 2069-02-06 00:00:00 2069-02-16 00:00:00 2069-02-25 00:00:00 
2069-03-06 00:00:00 2069-03-16 00:00:00 2069-03-26 12:00:00 2069-04-06 00:00:00 2069-04-16 00:00:00 2069-04-26 00:00:00 2069-05-06 00:00:00 2069-05-16 00:00:00 2069-05-26 12:00:00 2069-06-06 00:00:00 2069-06-16 00:00:00 2069-06-26 00:00:00 2069-07-06 00:00:00 2069-07-16 00:00:00 2069-07-26 12:00:00 2069-08-06 00:00:00 2069-08-16 00:00:00 2069-08-26 12:00:00 2069-09-06 00:00:00 2069-09-16 00:00:00 2069-09-26 00:00:00 2069-10-06 00:00:00 2069-10-16 00:00:00 2069-10-26 12:00:00 2069-11-06 00:00:00 2069-11-16 00:00:00 2069-11-26 00:00:00 2069-12-06 00:00:00 2069-12-16 00:00:00 2069-12-26 12:00:00 2070-01-06 00:00:00 2070-01-16 00:00:00 2070-01-26 12:00:00 2070-02-06 00:00:00 2070-02-16 00:00:00 2070-02-25 00:00:00 2070-03-06 00:00:00 2070-03-16 00:00:00 2070-03-26 12:00:00 2070-04-06 00:00:00 2070-04-16 00:00:00 2070-04-26 00:00:00 2070-05-06 00:00:00 2070-05-16 00:00:00 2070-05-26 12:00:00 2070-06-06 00:00:00 2070-06-16 00:00:00 2070-06-26 00:00:00 2070-07-06 00:00:00 2070-07-16 00:00:00 2070-07-26 12:00:00 2070-08-06 00:00:00 2070-08-16 00:00:00 2070-08-26 12:00:00 2070-09-06 00:00:00 2070-09-16 00:00:00 2070-09-26 00:00:00 2070-10-06 00:00:00 2070-10-16 00:00:00 2070-10-26 12:00:00 2070-11-06 00:00:00 2070-11-16 00:00:00 2070-11-26 00:00:00 2070-12-06 00:00:00 2070-12-16 00:00:00 2070-12-26 12:00:00 2071-01-06 00:00:00 2071-01-16 00:00:00 2071-01-26 12:00:00 2071-02-06 00:00:00 2071-02-16 00:00:00 2071-02-25 00:00:00 2071-03-06 00:00:00 2071-03-16 00:00:00 2071-03-26 12:00:00 2071-04-06 00:00:00 2071-04-16 00:00:00 2071-04-26 00:00:00 2071-05-06 00:00:00 2071-05-16 00:00:00 2071-05-26 12:00:00 2071-06-06 00:00:00 2071-06-16 00:00:00 2071-06-26 00:00:00 2071-07-06 00:00:00 2071-07-16 00:00:00 2071-07-26 12:00:00 2071-08-06 00:00:00 2071-08-16 00:00:00 2071-08-26 12:00:00 2071-09-06 00:00:00 2071-09-16 00:00:00 2071-09-26 00:00:00 2071-10-06 00:00:00 2071-10-16 00:00:00 2071-10-26 12:00:00 2071-11-06 00:00:00 2071-11-16 00:00:00 2071-11-26 00:00:00 2071-12-06 00:00:00 2071-12-16 00:00:00 2071-12-26 12:00:00 2072-01-06 00:00:00 2072-01-16 00:00:00 2072-01-26 12:00:00 2072-02-06 00:00:00 2072-02-16 00:00:00 2072-02-25 00:00:00 2072-03-06 00:00:00 2072-03-16 00:00:00 2072-03-26 12:00:00 2072-04-06 00:00:00 2072-04-16 00:00:00 2072-04-26 00:00:00 2072-05-06 00:00:00 2072-05-16 00:00:00 2072-05-26 12:00:00 2072-06-06 00:00:00 2072-06-16 00:00:00 2072-06-26 00:00:00 2072-07-06 00:00:00 2072-07-16 00:00:00 2072-07-26 12:00:00 2072-08-06 00:00:00 2072-08-16 00:00:00 2072-08-26 12:00:00 2072-09-06 00:00:00 2072-09-16 00:00:00 2072-09-26 00:00:00 2072-10-06 00:00:00 2072-10-16 00:00:00 2072-10-26 12:00:00 2072-11-06 00:00:00 2072-11-16 00:00:00 2072-11-26 00:00:00 2072-12-06 00:00:00 2072-12-16 00:00:00 2072-12-26 12:00:00 2073-01-06 00:00:00 2073-01-16 00:00:00 2073-01-26 12:00:00 2073-02-06 00:00:00 2073-02-16 00:00:00 2073-02-25 00:00:00 2073-03-06 00:00:00 2073-03-16 00:00:00 2073-03-26 12:00:00 2073-04-06 00:00:00 2073-04-16 00:00:00 2073-04-26 00:00:00 2073-05-06 00:00:00 2073-05-16 00:00:00 2073-05-26 12:00:00 2073-06-06 00:00:00 2073-06-16 00:00:00 2073-06-26 00:00:00 2073-07-06 00:00:00 2073-07-16 00:00:00 2073-07-26 12:00:00 2073-08-06 00:00:00 2073-08-16 00:00:00 2073-08-26 12:00:00 2073-09-06 00:00:00 2073-09-16 00:00:00 2073-09-26 00:00:00 2073-10-06 00:00:00 2073-10-16 00:00:00 2073-10-26 12:00:00 2073-11-06 00:00:00 2073-11-16 00:00:00 2073-11-26 00:00:00 2073-12-06 00:00:00 2073-12-16 00:00:00 2073-12-26 12:00:00 2074-01-06 00:00:00 2074-01-16 00:00:00 2074-01-26 12:00:00 2074-02-06 
00:00:00 2074-02-16 00:00:00 2074-02-25 00:00:00 2074-03-06 00:00:00 2074-03-16 00:00:00 2074-03-26 12:00:00 2074-04-06 00:00:00 2074-04-16 00:00:00 2074-04-26 00:00:00 2074-05-06 00:00:00 2074-05-16 00:00:00 2074-05-26 12:00:00 2074-06-06 00:00:00 2074-06-16 00:00:00 2074-06-26 00:00:00 2074-07-06 00:00:00 2074-07-16 00:00:00 2074-07-26 12:00:00 2074-08-06 00:00:00 2074-08-16 00:00:00 2074-08-26 12:00:00 2074-09-06 00:00:00 2074-09-16 00:00:00 2074-09-26 00:00:00 2074-10-06 00:00:00 2074-10-16 00:00:00 2074-10-26 12:00:00 2074-11-06 00:00:00 2074-11-16 00:00:00 2074-11-26 00:00:00 2074-12-06 00:00:00 2074-12-16 00:00:00 2074-12-26 12:00:00 2075-01-06 00:00:00 2075-01-16 00:00:00 2075-01-26 12:00:00 2075-02-06 00:00:00 2075-02-16 00:00:00 2075-02-25 00:00:00 2075-03-06 00:00:00 2075-03-16 00:00:00 2075-03-26 12:00:00 2075-04-06 00:00:00 2075-04-16 00:00:00 2075-04-26 00:00:00 2075-05-06 00:00:00 2075-05-16 00:00:00 2075-05-26 12:00:00 2075-06-06 00:00:00 2075-06-16 00:00:00 2075-06-26 00:00:00 2075-07-06 00:00:00 2075-07-16 00:00:00 2075-07-26 12:00:00 2075-08-06 00:00:00 2075-08-16 00:00:00 2075-08-26 12:00:00 2075-09-06 00:00:00 2075-09-16 00:00:00 2075-09-26 00:00:00 2075-10-06 00:00:00 2075-10-16 00:00:00 2075-10-26 12:00:00 2075-11-06 00:00:00 2075-11-16 00:00:00 2075-11-26 00:00:00 2075-12-06 00:00:00 2075-12-16 00:00:00 2075-12-26 12:00:00 2076-01-06 00:00:00 2076-01-16 00:00:00 2076-01-26 12:00:00 2076-02-06 00:00:00 2076-02-16 00:00:00 2076-02-25 00:00:00 2076-03-06 00:00:00 2076-03-16 00:00:00 2076-03-26 12:00:00 2076-04-06 00:00:00 2076-04-16 00:00:00 2076-04-26 00:00:00 2076-05-06 00:00:00 2076-05-16 00:00:00 2076-05-26 12:00:00 2076-06-06 00:00:00 2076-06-16 00:00:00 2076-06-26 00:00:00 2076-07-06 00:00:00 2076-07-16 00:00:00 2076-07-26 12:00:00 2076-08-06 00:00:00 2076-08-16 00:00:00 2076-08-26 12:00:00 2076-09-06 00:00:00 2076-09-16 00:00:00 2076-09-26 00:00:00 2076-10-06 00:00:00 2076-10-16 00:00:00 2076-10-26 12:00:00 2076-11-06 00:00:00 2076-11-16 00:00:00 2076-11-26 00:00:00 2076-12-06 00:00:00 2076-12-16 00:00:00 2076-12-26 12:00:00 2077-01-06 00:00:00 2077-01-16 00:00:00 2077-01-26 12:00:00 2077-02-06 00:00:00 2077-02-16 00:00:00 2077-02-25 00:00:00 2077-03-06 00:00:00 2077-03-16 00:00:00 2077-03-26 12:00:00 2077-04-06 00:00:00 2077-04-16 00:00:00 2077-04-26 00:00:00 2077-05-06 00:00:00 2077-05-16 00:00:00 2077-05-26 12:00:00 2077-06-06 00:00:00 2077-06-16 00:00:00 2077-06-26 00:00:00 2077-07-06 00:00:00 2077-07-16 00:00:00 2077-07-26 12:00:00 2077-08-06 00:00:00 2077-08-16 00:00:00 2077-08-26 12:00:00 2077-09-06 00:00:00 2077-09-16 00:00:00 2077-09-26 00:00:00 2077-10-06 00:00:00 2077-10-16 00:00:00 2077-10-26 12:00:00 2077-11-06 00:00:00 2077-11-16 00:00:00 2077-11-26 00:00:00 2077-12-06 00:00:00 2077-12-16 00:00:00 2077-12-26 12:00:00 2078-01-06 00:00:00 2078-01-16 00:00:00 2078-01-26 12:00:00 2078-02-06 00:00:00 2078-02-16 00:00:00 2078-02-25 00:00:00 2078-03-06 00:00:00 2078-03-16 00:00:00 2078-03-26 12:00:00 2078-04-06 00:00:00 2078-04-16 00:00:00 2078-04-26 00:00:00 2078-05-06 00:00:00 2078-05-16 00:00:00 2078-05-26 12:00:00 2078-06-06 00:00:00 2078-06-16 00:00:00 2078-06-26 00:00:00 2078-07-06 00:00:00 2078-07-16 00:00:00 2078-07-26 12:00:00 2078-08-06 00:00:00 2078-08-16 00:00:00 2078-08-26 12:00:00 2078-09-06 00:00:00 2078-09-16 00:00:00 2078-09-26 00:00:00 2078-10-06 00:00:00 2078-10-16 00:00:00 2078-10-26 12:00:00 2078-11-06 00:00:00 2078-11-16 00:00:00 2078-11-26 00:00:00 2078-12-06 00:00:00 2078-12-16 00:00:00 2078-12-26 12:00:00 2079-01-06 00:00:00 
2079-01-16 00:00:00 2079-01-26 12:00:00 2079-02-06 00:00:00 2079-02-16 00:00:00 2079-02-25 00:00:00 2079-03-06 00:00:00 2079-03-16 00:00:00 2079-03-26 12:00:00 2079-04-06 00:00:00 2079-04-16 00:00:00 2079-04-26 00:00:00 2079-05-06 00:00:00 2079-05-16 00:00:00 2079-05-26 12:00:00 2079-06-06 00:00:00 2079-06-16 00:00:00 2079-06-26 00:00:00 2079-07-06 00:00:00 2079-07-16 00:00:00 2079-07-26 12:00:00 2079-08-06 00:00:00 2079-08-16 00:00:00 2079-08-26 12:00:00 2079-09-06 00:00:00 2079-09-16 00:00:00 2079-09-26 00:00:00 2079-10-06 00:00:00 2079-10-16 00:00:00 2079-10-26 12:00:00 2079-11-06 00:00:00 2079-11-16 00:00:00 2079-11-26 00:00:00 2079-12-06 00:00:00 2079-12-16 00:00:00 2079-12-26 12:00:00 2080-01-06 00:00:00 2080-01-16 00:00:00 2080-01-26 12:00:00 2080-02-06 00:00:00 2080-02-16 00:00:00 2080-02-25 00:00:00 2080-03-06 00:00:00 2080-03-16 00:00:00 2080-03-26 12:00:00 2080-04-06 00:00:00 2080-04-16 00:00:00 2080-04-26 00:00:00 2080-05-06 00:00:00 2080-05-16 00:00:00 2080-05-26 12:00:00 2080-06-06 00:00:00 2080-06-16 00:00:00 2080-06-26 00:00:00 2080-07-06 00:00:00 2080-07-16 00:00:00 2080-07-26 12:00:00 2080-08-06 00:00:00 2080-08-16 00:00:00 2080-08-26 12:00:00 2080-09-06 00:00:00 2080-09-16 00:00:00 2080-09-26 00:00:00 2080-10-06 00:00:00 2080-10-16 00:00:00 2080-10-26 12:00:00 2080-11-06 00:00:00 2080-11-16 00:00:00 2080-11-26 00:00:00 2080-12-06 00:00:00 2080-12-16 00:00:00 2080-12-26 12:00:00 2081-01-06 00:00:00 2081-01-16 00:00:00 2081-01-26 12:00:00 2081-02-06 00:00:00 2081-02-16 00:00:00 2081-02-25 00:00:00 2081-03-06 00:00:00 2081-03-16 00:00:00 2081-03-26 12:00:00 2081-04-06 00:00:00 2081-04-16 00:00:00 2081-04-26 00:00:00 2081-05-06 00:00:00 2081-05-16 00:00:00 2081-05-26 12:00:00 2081-06-06 00:00:00 2081-06-16 00:00:00 2081-06-26 00:00:00 2081-07-06 00:00:00 2081-07-16 00:00:00 2081-07-26 12:00:00 2081-08-06 00:00:00 2081-08-16 00:00:00 2081-08-26 12:00:00 2081-09-06 00:00:00 2081-09-16 00:00:00 2081-09-26 00:00:00 2081-10-06 00:00:00 2081-10-16 00:00:00 2081-10-26 12:00:00 2081-11-06 00:00:00 2081-11-16 00:00:00 2081-11-26 00:00:00 2081-12-06 00:00:00 2081-12-16 00:00:00 2081-12-26 12:00:00 2082-01-06 00:00:00 2082-01-16 00:00:00 2082-01-26 12:00:00 2082-02-06 00:00:00 2082-02-16 00:00:00 2082-02-25 00:00:00 2082-03-06 00:00:00 2082-03-16 00:00:00 2082-03-26 12:00:00 2082-04-06 00:00:00 2082-04-16 00:00:00 2082-04-26 00:00:00 2082-05-06 00:00:00 2082-05-16 00:00:00 2082-05-26 12:00:00 2082-06-06 00:00:00 2082-06-16 00:00:00 2082-06-26 00:00:00 2082-07-06 00:00:00 2082-07-16 00:00:00 2082-07-26 12:00:00 2082-08-06 00:00:00 2082-08-16 00:00:00 2082-08-26 12:00:00 2082-09-06 00:00:00 2082-09-16 00:00:00 2082-09-26 00:00:00 2082-10-06 00:00:00 2082-10-16 00:00:00 2082-10-26 12:00:00 2082-11-06 00:00:00 2082-11-16 00:00:00 2082-11-26 00:00:00 2082-12-06 00:00:00 2082-12-16 00:00:00 2082-12-26 12:00:00 2083-01-06 00:00:00 2083-01-16 00:00:00 2083-01-26 12:00:00 2083-02-06 00:00:00 2083-02-16 00:00:00 2083-02-25 00:00:00 2083-03-06 00:00:00 2083-03-16 00:00:00 2083-03-26 12:00:00 2083-04-06 00:00:00 2083-04-16 00:00:00 2083-04-26 00:00:00 2083-05-06 00:00:00 2083-05-16 00:00:00 2083-05-26 12:00:00 2083-06-06 00:00:00 2083-06-16 00:00:00 2083-06-26 00:00:00 2083-07-06 00:00:00 2083-07-16 00:00:00 2083-07-26 12:00:00 2083-08-06 00:00:00 2083-08-16 00:00:00 2083-08-26 12:00:00 2083-09-06 00:00:00 2083-09-16 00:00:00 2083-09-26 00:00:00 2083-10-06 00:00:00 2083-10-16 00:00:00 2083-10-26 12:00:00 2083-11-06 00:00:00 2083-11-16 00:00:00 2083-11-26 00:00:00 2083-12-06 00:00:00 2083-12-16 
00:00:00 2083-12-26 12:00:00 2084-01-06 00:00:00 2084-01-16 00:00:00 2084-01-26 12:00:00 2084-02-06 00:00:00 2084-02-16 00:00:00 2084-02-25 00:00:00 2084-03-06 00:00:00 2084-03-16 00:00:00 2084-03-26 12:00:00 2084-04-06 00:00:00 2084-04-16 00:00:00 2084-04-26 00:00:00 2084-05-06 00:00:00 2084-05-16 00:00:00 2084-05-26 12:00:00 2084-06-06 00:00:00 2084-06-16 00:00:00 2084-06-26 00:00:00 2084-07-06 00:00:00 2084-07-16 00:00:00 2084-07-26 12:00:00 2084-08-06 00:00:00 2084-08-16 00:00:00 2084-08-26 12:00:00 2084-09-06 00:00:00 2084-09-16 00:00:00 2084-09-26 00:00:00 2084-10-06 00:00:00 2084-10-16 00:00:00 2084-10-26 12:00:00 2084-11-06 00:00:00 2084-11-16 00:00:00 2084-11-26 00:00:00 2084-12-06 00:00:00 2084-12-16 00:00:00 2084-12-26 12:00:00 2085-01-06 00:00:00 2085-01-16 00:00:00 2085-01-26 12:00:00 2085-02-06 00:00:00 2085-02-16 00:00:00 2085-02-25 00:00:00 2085-03-06 00:00:00 2085-03-16 00:00:00 2085-03-26 12:00:00 2085-04-06 00:00:00 2085-04-16 00:00:00 2085-04-26 00:00:00 2085-05-06 00:00:00 2085-05-16 00:00:00 2085-05-26 12:00:00 2085-06-06 00:00:00 2085-06-16 00:00:00 2085-06-26 00:00:00 2085-07-06 00:00:00 2085-07-16 00:00:00 2085-07-26 12:00:00 2085-08-06 00:00:00 2085-08-16 00:00:00 2085-08-26 12:00:00 2085-09-06 00:00:00 2085-09-16 00:00:00 2085-09-26 00:00:00 2085-10-06 00:00:00 2085-10-16 00:00:00 2085-10-26 12:00:00 2085-11-06 00:00:00 2085-11-16 00:00:00 2085-11-26 00:00:00 2085-12-06 00:00:00 2085-12-16 00:00:00 2085-12-26 12:00:00 2086-01-06 00:00:00 2086-01-16 00:00:00 2086-01-26 12:00:00 2086-02-06 00:00:00 2086-02-16 00:00:00 2086-02-25 00:00:00 2086-03-06 00:00:00 2086-03-16 00:00:00 2086-03-26 12:00:00 2086-04-06 00:00:00 2086-04-16 00:00:00 2086-04-26 00:00:00 2086-05-06 00:00:00 2086-05-16 00:00:00 2086-05-26 12:00:00 2086-06-06 00:00:00 2086-06-16 00:00:00 2086-06-26 00:00:00 2086-07-06 00:00:00 2086-07-16 00:00:00 2086-07-26 12:00:00 2086-08-06 00:00:00 2086-08-16 00:00:00 2086-08-26 12:00:00 2086-09-06 00:00:00 2086-09-16 00:00:00 2086-09-26 00:00:00 2086-10-06 00:00:00 2086-10-16 00:00:00 2086-10-26 12:00:00 2086-11-06 00:00:00 2086-11-16 00:00:00 2086-11-26 00:00:00 2086-12-06 00:00:00 2086-12-16 00:00:00 2086-12-26 12:00:00 2087-01-06 00:00:00 2087-01-16 00:00:00 2087-01-26 12:00:00 2087-02-06 00:00:00 2087-02-16 00:00:00 2087-02-25 00:00:00 2087-03-06 00:00:00 2087-03-16 00:00:00 2087-03-26 12:00:00 2087-04-06 00:00:00 2087-04-16 00:00:00 2087-04-26 00:00:00 2087-05-06 00:00:00 2087-05-16 00:00:00 2087-05-26 12:00:00 2087-06-06 00:00:00 2087-06-16 00:00:00 2087-06-26 00:00:00 2087-07-06 00:00:00 2087-07-16 00:00:00 2087-07-26 12:00:00 2087-08-06 00:00:00 2087-08-16 00:00:00 2087-08-26 12:00:00 2087-09-06 00:00:00 2087-09-16 00:00:00 2087-09-26 00:00:00 2087-10-06 00:00:00 2087-10-16 00:00:00 2087-10-26 12:00:00 2087-11-06 00:00:00 2087-11-16 00:00:00 2087-11-26 00:00:00 2087-12-06 00:00:00 2087-12-16 00:00:00 2087-12-26 12:00:00 2088-01-06 00:00:00 2088-01-16 00:00:00 2088-01-26 12:00:00 2088-02-06 00:00:00 2088-02-16 00:00:00 2088-02-25 00:00:00 2088-03-06 00:00:00 2088-03-16 00:00:00 2088-03-26 12:00:00 2088-04-06 00:00:00 2088-04-16 00:00:00 2088-04-26 00:00:00 2088-05-06 00:00:00 2088-05-16 00:00:00 2088-05-26 12:00:00 2088-06-06 00:00:00 2088-06-16 00:00:00 2088-06-26 00:00:00 2088-07-06 00:00:00 2088-07-16 00:00:00 2088-07-26 12:00:00 2088-08-06 00:00:00 2088-08-16 00:00:00 2088-08-26 12:00:00 2088-09-06 00:00:00 2088-09-16 00:00:00 2088-09-26 00:00:00 2088-10-06 00:00:00 2088-10-16 00:00:00 2088-10-26 12:00:00 2088-11-06 00:00:00 2088-11-16 00:00:00 
2088-11-26 00:00:00 2088-12-06 00:00:00 2088-12-16 00:00:00 2088-12-26 12:00:00 2089-01-06 00:00:00 2089-01-16 00:00:00 2089-01-26 12:00:00 2089-02-06 00:00:00 2089-02-16 00:00:00 2089-02-25 00:00:00 2089-03-06 00:00:00 2089-03-16 00:00:00 2089-03-26 12:00:00 2089-04-06 00:00:00 2089-04-16 00:00:00 2089-04-26 00:00:00 2089-05-06 00:00:00 2089-05-16 00:00:00 2089-05-26 12:00:00 2089-06-06 00:00:00 2089-06-16 00:00:00 2089-06-26 00:00:00 2089-07-06 00:00:00 2089-07-16 00:00:00 2089-07-26 12:00:00 2089-08-06 00:00:00 2089-08-16 00:00:00 2089-08-26 12:00:00 2089-09-06 00:00:00 2089-09-16 00:00:00 2089-09-26 00:00:00 2089-10-06 00:00:00 2089-10-16 00:00:00 2089-10-26 12:00:00 2089-11-06 00:00:00 2089-11-16 00:00:00 2089-11-26 00:00:00 2089-12-06 00:00:00 2089-12-16 00:00:00 2089-12-26 12:00:00 2090-01-06 00:00:00 2090-01-16 00:00:00 2090-01-26 12:00:00 2090-02-06 00:00:00 2090-02-16 00:00:00 2090-02-25 00:00:00 2090-03-06 00:00:00 2090-03-16 00:00:00 2090-03-26 12:00:00 2090-04-06 00:00:00 2090-04-16 00:00:00 2090-04-26 00:00:00 2090-05-06 00:00:00 2090-05-16 00:00:00 2090-05-26 12:00:00 2090-06-06 00:00:00 2090-06-16 00:00:00 2090-06-26 00:00:00 2090-07-06 00:00:00 2090-07-16 00:00:00 2090-07-26 12:00:00 2090-08-06 00:00:00 2090-08-16 00:00:00 2090-08-26 12:00:00 2090-09-06 00:00:00 2090-09-16 00:00:00 2090-09-26 00:00:00 2090-10-06 00:00:00 2090-10-16 00:00:00 2090-10-26 12:00:00 2090-11-06 00:00:00 2090-11-16 00:00:00 2090-11-26 00:00:00 2090-12-06 00:00:00 2090-12-16 00:00:00 2090-12-26 12:00:00 2091-01-06 00:00:00 2091-01-16 00:00:00 2091-01-26 12:00:00 2091-02-06 00:00:00 2091-02-16 00:00:00 2091-02-25 00:00:00 2091-03-06 00:00:00 2091-03-16 00:00:00 2091-03-26 12:00:00 2091-04-06 00:00:00 2091-04-16 00:00:00 2091-04-26 00:00:00 2091-05-06 00:00:00 2091-05-16 00:00:00 2091-05-26 12:00:00 2091-06-06 00:00:00 2091-06-16 00:00:00 2091-06-26 00:00:00 2091-07-06 00:00:00 2091-07-16 00:00:00 2091-07-26 12:00:00 2091-08-06 00:00:00 2091-08-16 00:00:00 2091-08-26 12:00:00 2091-09-06 00:00:00 2091-09-16 00:00:00 2091-09-26 00:00:00 2091-10-06 00:00:00 2091-10-16 00:00:00 2091-10-26 12:00:00 2091-11-06 00:00:00 2091-11-16 00:00:00 2091-11-26 00:00:00 2091-12-06 00:00:00 2091-12-16 00:00:00 2091-12-26 12:00:00 2092-01-06 00:00:00 2092-01-16 00:00:00 2092-01-26 12:00:00 2092-02-06 00:00:00 2092-02-16 00:00:00 2092-02-25 00:00:00 2092-03-06 00:00:00 2092-03-16 00:00:00 2092-03-26 12:00:00 2092-04-06 00:00:00 2092-04-16 00:00:00 2092-04-26 00:00:00 2092-05-06 00:00:00 2092-05-16 00:00:00 2092-05-26 12:00:00 2092-06-06 00:00:00 2092-06-16 00:00:00 2092-06-26 00:00:00 2092-07-06 00:00:00 2092-07-16 00:00:00 2092-07-26 12:00:00 2092-08-06 00:00:00 2092-08-16 00:00:00 2092-08-26 12:00:00 2092-09-06 00:00:00 2092-09-16 00:00:00 2092-09-26 00:00:00 2092-10-06 00:00:00 2092-10-16 00:00:00 2092-10-26 12:00:00 2092-11-06 00:00:00 2092-11-16 00:00:00 2092-11-26 00:00:00 2092-12-06 00:00:00 2092-12-16 00:00:00 2092-12-26 12:00:00 2093-01-06 00:00:00 2093-01-16 00:00:00 2093-01-26 12:00:00 2093-02-06 00:00:00 2093-02-16 00:00:00 2093-02-25 00:00:00 2093-03-06 00:00:00 2093-03-16 00:00:00 2093-03-26 12:00:00 2093-04-06 00:00:00 2093-04-16 00:00:00 2093-04-26 00:00:00 2093-05-06 00:00:00 2093-05-16 00:00:00 2093-05-26 12:00:00 2093-06-06 00:00:00 2093-06-16 00:00:00 2093-06-26 00:00:00 2093-07-06 00:00:00 2093-07-16 00:00:00 2093-07-26 12:00:00 2093-08-06 00:00:00 2093-08-16 00:00:00 2093-08-26 12:00:00 2093-09-06 00:00:00 2093-09-16 00:00:00 2093-09-26 00:00:00 2093-10-06 00:00:00 2093-10-16 00:00:00 2093-10-26 
12:00:00 2093-11-06 00:00:00 2093-11-16 00:00:00 2093-11-26 00:00:00 2093-12-06 00:00:00 2093-12-16 00:00:00 2093-12-26 12:00:00 2094-01-06 00:00:00 2094-01-16 00:00:00 2094-01-26 12:00:00 2094-02-06 00:00:00 2094-02-16 00:00:00 2094-02-25 00:00:00 2094-03-06 00:00:00 2094-03-16 00:00:00 2094-03-26 12:00:00 2094-04-06 00:00:00 2094-04-16 00:00:00 2094-04-26 00:00:00 2094-05-06 00:00:00 2094-05-16 00:00:00 2094-05-26 12:00:00 2094-06-06 00:00:00 2094-06-16 00:00:00 2094-06-26 00:00:00 2094-07-06 00:00:00 2094-07-16 00:00:00 2094-07-26 12:00:00 2094-08-06 00:00:00 2094-08-16 00:00:00 2094-08-26 12:00:00 2094-09-06 00:00:00 2094-09-16 00:00:00 2094-09-26 00:00:00 2094-10-06 00:00:00 2094-10-16 00:00:00 2094-10-26 12:00:00 2094-11-06 00:00:00 2094-11-16 00:00:00 2094-11-26 00:00:00 2094-12-06 00:00:00 2094-12-16 00:00:00 2094-12-26 12:00:00 2095-01-06 00:00:00 2095-01-16 00:00:00 2095-01-26 12:00:00 2095-02-06 00:00:00 2095-02-16 00:00:00 2095-02-25 00:00:00 2095-03-06 00:00:00 2095-03-16 00:00:00 2095-03-26 12:00:00 2095-04-06 00:00:00 2095-04-16 00:00:00 2095-04-26 00:00:00 2095-05-06 00:00:00 2095-05-16 00:00:00 2095-05-26 12:00:00 2095-06-06 00:00:00 2095-06-16 00:00:00 2095-06-26 00:00:00 2095-07-06 00:00:00 2095-07-16 00:00:00 2095-07-26 12:00:00 2095-08-06 00:00:00 2095-08-16 00:00:00 2095-08-26 12:00:00 2095-09-06 00:00:00 2095-09-16 00:00:00 2095-09-26 00:00:00 2095-10-06 00:00:00 2095-10-16 00:00:00 2095-10-26 12:00:00 2095-11-06 00:00:00 2095-11-16 00:00:00 2095-11-26 00:00:00 2095-12-06 00:00:00 2095-12-16 00:00:00 2095-12-26 12:00:00 2096-01-06 00:00:00 2096-01-16 00:00:00 2096-01-26 12:00:00 2096-02-06 00:00:00 2096-02-16 00:00:00 2096-02-25 00:00:00 2096-03-06 00:00:00 2096-03-16 00:00:00 2096-03-26 12:00:00 2096-04-06 00:00:00 2096-04-16 00:00:00 2096-04-26 00:00:00 2096-05-06 00:00:00 2096-05-16 00:00:00 2096-05-26 12:00:00 2096-06-06 00:00:00 2096-06-16 00:00:00 2096-06-26 00:00:00 2096-07-06 00:00:00 2096-07-16 00:00:00 2096-07-26 12:00:00 2096-08-06 00:00:00 2096-08-16 00:00:00 2096-08-26 12:00:00 2096-09-06 00:00:00 2096-09-16 00:00:00 2096-09-26 00:00:00 2096-10-06 00:00:00 2096-10-16 00:00:00 2096-10-26 12:00:00 2096-11-06 00:00:00 2096-11-16 00:00:00 2096-11-26 00:00:00 2096-12-06 00:00:00 2096-12-16 00:00:00 2096-12-26 12:00:00 2097-01-06 00:00:00 2097-01-16 00:00:00 2097-01-26 12:00:00 2097-02-06 00:00:00 2097-02-16 00:00:00 2097-02-25 00:00:00 2097-03-06 00:00:00 2097-03-16 00:00:00 2097-03-26 12:00:00 2097-04-06 00:00:00 2097-04-16 00:00:00 2097-04-26 00:00:00 2097-05-06 00:00:00 2097-05-16 00:00:00 2097-05-26 12:00:00 2097-06-06 00:00:00 2097-06-16 00:00:00 2097-06-26 00:00:00 2097-07-06 00:00:00 2097-07-16 00:00:00 2097-07-26 12:00:00 2097-08-06 00:00:00 2097-08-16 00:00:00 2097-08-26 12:00:00 2097-09-06 00:00:00 2097-09-16 00:00:00 2097-09-26 00:00:00 2097-10-06 00:00:00 2097-10-16 00:00:00 2097-10-26 12:00:00 2097-11-06 00:00:00 2097-11-16 00:00:00 2097-11-26 00:00:00 2097-12-06 00:00:00 2097-12-16 00:00:00 2097-12-26 12:00:00 2098-01-06 00:00:00 2098-01-16 00:00:00 2098-01-26 12:00:00 2098-02-06 00:00:00 2098-02-16 00:00:00 2098-02-25 00:00:00 2098-03-06 00:00:00 2098-03-16 00:00:00 2098-03-26 12:00:00 2098-04-06 00:00:00 2098-04-16 00:00:00 2098-04-26 00:00:00 2098-05-06 00:00:00 2098-05-16 00:00:00 2098-05-26 12:00:00 2098-06-06 00:00:00 2098-06-16 00:00:00 2098-06-26 00:00:00 2098-07-06 00:00:00 2098-07-16 00:00:00 2098-07-26 12:00:00 2098-08-06 00:00:00 2098-08-16 00:00:00 2098-08-26 12:00:00 2098-09-06 00:00:00 2098-09-16 00:00:00 2098-09-26 00:00:00 
2098-10-06 00:00:00 2098-10-16 00:00:00 2098-10-26 12:00:00 2098-11-06 00:00:00 2098-11-16 00:00:00 2098-11-26 00:00:00 2098-12-06 00:00:00 2098-12-16 00:00:00 2098-12-26 12:00:00 2099-01-06 00:00:00 2099-01-16 00:00:00 2099-01-26 12:00:00 2099-02-06 00:00:00 2099-02-16 00:00:00 2099-02-25 00:00:00 2099-03-06 00:00:00 2099-03-16 00:00:00 2099-03-26 12:00:00 2099-04-06 00:00:00 2099-04-16 00:00:00 2099-04-26 00:00:00 2099-05-06 00:00:00 2099-05-16 00:00:00 2099-05-26 12:00:00 2099-06-06 00:00:00 2099-06-16 00:00:00 2099-06-26 00:00:00 2099-07-06 00:00:00 2099-07-16 00:00:00 2099-07-26 12:00:00 2099-08-06 00:00:00 2099-08-16 00:00:00 2099-08-26 12:00:00 2099-09-06 00:00:00 2099-09-16 00:00:00 2099-09-26 00:00:00 2099-10-06 00:00:00 2099-10-16 00:00:00 2099-10-26 12:00:00 2099-11-06 00:00:00 2099-11-16 00:00:00 2099-11-26 00:00:00 2099-12-06 00:00:00 2099-12-16 00:00:00 2099-12-26 12:00:00
# Create monthly factors for a baseline era and early, mid and late 21st century eras
baseline <- t$factor(era = 1991:2020)
future <- t$factor(era = list(early = 2021:2040, mid = 2041:2060, late = 2061:2080))
str(future)
#> List of 3
#> $ early: Factor w/ 12 levels "01","02","03",..: NA NA NA NA NA NA NA NA NA NA ...
#> ..- attr(*, "era")= int 20
#> ..- attr(*, "period")= chr "month"
#> $ mid : Factor w/ 12 levels "01","02","03",..: NA NA NA NA NA NA NA NA NA NA ...
#> ..- attr(*, "era")= int 20
#> ..- attr(*, "period")= chr "month"
#> $ late : Factor w/ 12 levels "01","02","03",..: NA NA NA NA NA NA NA NA NA NA ...
#> ..- attr(*, "era")= int 20
#> ..- attr(*, "period")= chr "month"
For the “era” version, note that the factor contains NA values wherever a time series value does not fall within the era. This ensures that the factor is compatible with the data set that the time series describes, so that functions like tapply() will not throw an error.
There are six periods defined for factoring:
- year, to summarize data to yearly timescales.
- season, the meteorological seasons (see the sketch after this list). Note that the month of December is added to the months of January and February of the following year, so the date “2020-12-01” yields the factor value “2021S1”.
- quarter, the standard quarters of the year.
- month, monthly summaries, the default period.
- dekad, 10-day periods. Each month is subdivided into dekads as follows: (1) days 01 - 10; (2) days 11 - 20; (3) the remainder of the month.
- day, to summarize sub-daily data.
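As a brief illustration of the season rollover, here is a sketch with a small hypothetical time series (the names t_djf and seas are illustrative only, not taken from this vignette's data):
# December is assigned to the following year's S1 (winter) season
t_djf <- CFtime("days since 2020-12-01", "standard", 0:89)  # Dec 2020 - Feb 2021
seas <- t_djf$factor("season")
levels(seas)  # expected to be "2021S1": all 90 days fall in winter 2021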
A CFTime instance describes the “time” dimension of an associated data set. When you process that dimension of the data set using CFTime$factor() or another method to filter or otherwise subset the “time” dimension, the resulting data set will have a different “time” dimension. To associate a proper CFTime instance with your processing result, the methods in this package return that CFTime instance as an attribute:
(new_time <- attr(f_k, "CFTime"))
#> CF calendar:
#> Origin : 1850-01-01 00:00:00
#> Units : days
#> Type : noleap
#> Time series:
#> Elements: [2015-01-06 00:00:00 .. 2099-12-26 12:00:00] (average of 10.138771 days between 3060 elements)
#> Bounds : regular and consecutive
A fully worked-out example of this is provided in the vignette “Processing climate projection data”.
You can test if your time series is complete with the function
is_complete()
. A time series is considered complete if the
time steps between the two extreme values are equally spaced. There is a
“fuzzy” assessment of completeness for time series with a datum unit of
“days” or smaller where the time steps are months or years apart - these
have different lengths in days in different months or years (e.g. a leap
year).
If your time series is incomplete, for instance because it has missing time steps, you should account for that in your further processing. For example, you might want to exclude months that have less than 90% of daily data from further processing, or apply weights based on the actual coverage; a sketch of such a filter follows the coverage example below.
# Is the time series complete?
is_complete(t)
#> [1] TRUE
# How many time units fit in a factor level?
t$factor_units(baseline)
#> [1] 31 28 31 30 31 30 31 31 30 31 30 31
# What's the absolute and relative coverage of our time series
t$factor_coverage(baseline, "absolute")
#> [1] 186 168 186 180 186 180 186 186 180 186 180 186
t$factor_coverage(baseline, "relative")
#> [1] 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2 0.2
The time series is complete but coverage of the baseline era is only
20%! Recall that the time series starts in 2015 while the baseline
period in the factor is for 1991:2020
so that's only 6
years of time series data out of 30 years of the baseline factor.
An artificial example of missing data:
# 4 years of data on a `365_day` calendar, keep 80% of values
n <- 365 * 4
cov <- 0.8
offsets <- sample(0:(n-1), n * cov)
(t <- CFtime("days since 2020-01-01", "365_day", offsets))
#> Warning: Offsets not monotonically increasing.
#> CF calendar:
#> Origin : 2020-01-01 00:00:00
#> Units : days
#> Type : 365_day
#> Time series:
#> Elements: [2020-01-01 .. 2023-12-31] (average of 1.250214 days between 1168 elements)
#> Bounds : not set
# Note that there are about 1.25 days between observations
mon <- t$factor("month")
t$factor_coverage(mon, "absolute")
#> [1] 25 21 22 18 26 22 25 22 26 27 26 22 28 23 23 26 20 20 26 26 24 27 21 27 26
#> [26] 18 25 25 25 23 20 28 27 24 28 25 22 21 22 23 22 26 28 28 27 27 27 28
t$factor_coverage(mon, "relative")
#> [1] 0.8064516 0.7500000 0.7096774 0.6000000 0.8387097 0.7333333 0.8064516
#> [8] 0.7096774 0.8666667 0.8709677 0.8666667 0.7096774 0.9032258 0.8214286
#> [15] 0.7419355 0.8666667 0.6451613 0.6666667 0.8387097 0.8387097 0.8000000
#> [22] 0.8709677 0.7000000 0.8709677 0.8387097 0.6428571 0.8064516 0.8333333
#> [29] 0.8064516 0.7666667 0.6451613 0.9032258 0.9000000 0.7741935 0.9333333
#> [36] 0.8064516 0.7096774 0.7500000 0.7096774 0.7666667 0.7096774 0.8666667
#> [43] 0.9032258 0.9032258 0.9000000 0.8709677 0.9000000 0.9032258
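Building on the coverage values above, here is a hedged sketch of the 90% filter suggested earlier; the daily data vector pr is hypothetical and not part of this vignette:
rel_cov <- t$factor_coverage(mon, "relative")
good_months <- levels(mon)[rel_cov >= 0.9]        # levels with at least 90% coverage
keep <- !is.na(mon) & mon %in% good_months
# pr is a hypothetical vector of daily values aligned with t; aggregate only
# the months with sufficient coverage:
# monthly_mean <- tapply(pr[keep], droplevels(mon[keep]), mean)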
Keep in mind, though, that there are data sets where the time unit is finer than the intended resolution of the data. Since the CF conventions recommend that the coarsest time unit is “day”, many files with monthly data sets have a definition like days since 2016-01-01 with offset values for the middle of the month like 15, 44, 74, 104, .... Even in these scenarios you can verify that your data set is complete with the function is_complete().
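A small sketch of that scenario, using the mid-month offsets quoted above (the values after 104 are illustrative, not read from an actual data file):
t_mon <- CFtime("days since 2016-01-01", "standard", c(15, 44, 74, 104, 135, 165))
# offsets beyond 104 are assumed mid-month values for illustration
is_complete(t_mon)  # expected TRUE: the fuzzy check tolerates varying month lengths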
The CF Metadata Conventions supports 11 different calendars. None of
these are fully compatible with POSIXt, the basis for timekeeping on
virtually all computers. The reason for this is that POSIXt does not
consider leap seconds (just like the tai
calendar) but
computer clocks are periodically synchronized using Network Time
Protocol servers that report UTC time. The problem is easily
demonstrated:
# 1970-01-01 is the origin of POSIXt
difftime(as.POSIXct("2024-01-01"), as.POSIXct("1970-01-01"), units = "sec")
#> Time difference of 1704067200 secs
# Leap seconds in UTC
.leap.seconds
#> [1] "1972-07-01 GMT" "1973-01-01 GMT" "1974-01-01 GMT" "1975-01-01 GMT"
#> [5] "1976-01-01 GMT" "1977-01-01 GMT" "1978-01-01 GMT" "1979-01-01 GMT"
#> [9] "1980-01-01 GMT" "1981-07-01 GMT" "1982-07-01 GMT" "1983-07-01 GMT"
#> [13] "1985-07-01 GMT" "1988-01-01 GMT" "1990-01-01 GMT" "1991-01-01 GMT"
#> [17] "1992-07-01 GMT" "1993-07-01 GMT" "1994-07-01 GMT" "1996-01-01 GMT"
#> [21] "1997-07-01 GMT" "1999-01-01 GMT" "2006-01-01 GMT" "2009-01-01 GMT"
#> [25] "2012-07-01 GMT" "2015-07-01 GMT" "2017-01-01 GMT"
difftime()
is off by 27 seconds, the number of leap
seconds in UTC since their introduction in 1972. Your computer may have
the correct time based on UTC, but calculations over periods that
include leap seconds are always off by a number of seconds.
If 27 seconds is of no concern to you or your application - perhaps
your data has a daily resolution - then you can safely forget about the
leap seconds in several of the calendars, in particular
standard
(for periods after 1582-10-15),
proleptic_gregorian
and tai
. The
utc
calendar does account for leap seconds so consider if
you should use that - this is the only calendar that considers leap
seconds in calculation. These calendars support the generation of
timestamps in POSIXct with the as_timestamp()
function but
note the potential for a discrepancy due to the presence of leap
seconds.
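A minimal sketch of the difference, assuming a short daily time series; the incompatible case is shown as a comment:
t_std <- CFtime("days since 2020-01-01", "standard", 0:2)
as_timestamp(t_std, asPOSIX = TRUE)    # POSIXct vector on a POSIXt-compatible calendar
t_360 <- CFtime("days since 2020-01-01", "360_day", 0:2)
# as_timestamp(t_360, asPOSIX = TRUE)  # throws an error: calendar not POSIXt-compatible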
If second accuracy is of concern, then you should carefully consider
the time keeping in the source of your data and use a matching calendar.
The utc
calendar is a sensible option if your equipment
synchronizes time with an NTP server or a computer that does so. Even
then you should ensure that time is accurate after a new leap second is
introduced.
The other calendars have discrepancies with POSIXt that are much
larger, namely one or more days. These calendars do not support POSIXct
timestamps and an error will be thrown if you try. If you really want
the timestamps in POSIXct then you can generate the timestamps as
character strings using this package, and then convert to a
POSIXct
or Date
using the available R tools.
Converting time series using these incompatible calendars to
POSIXct
or Date
is likely to produce problems.
This is most pronounced for the 360_day
calendar:
# Days in January and February
t <- CFtime("days since 2023-01-01", "360_day", 0:59)
ts_days <- t$as_timestamp("date")
as.Date(ts_days)
#> [1] "2023-01-01" "2023-01-02" "2023-01-03" "2023-01-04" "2023-01-05"
#> [6] "2023-01-06" "2023-01-07" "2023-01-08" "2023-01-09" "2023-01-10"
#> [11] "2023-01-11" "2023-01-12" "2023-01-13" "2023-01-14" "2023-01-15"
#> [16] "2023-01-16" "2023-01-17" "2023-01-18" "2023-01-19" "2023-01-20"
#> [21] "2023-01-21" "2023-01-22" "2023-01-23" "2023-01-24" "2023-01-25"
#> [26] "2023-01-26" "2023-01-27" "2023-01-28" "2023-01-29" "2023-01-30"
#> [31] "2023-02-01" "2023-02-02" "2023-02-03" "2023-02-04" "2023-02-05"
#> [36] "2023-02-06" "2023-02-07" "2023-02-08" "2023-02-09" "2023-02-10"
#> [41] "2023-02-11" "2023-02-12" "2023-02-13" "2023-02-14" "2023-02-15"
#> [46] "2023-02-16" "2023-02-17" "2023-02-18" "2023-02-19" "2023-02-20"
#> [51] "2023-02-21" "2023-02-22" "2023-02-23" "2023-02-24" "2023-02-25"
#> [56] "2023-02-26" "2023-02-27" "2023-02-28" NA NA
31 January is missing from the vector of Date
s because
the 360_day
calendar does not include it and 29 and 30
February are NA
s because POSIXt rejects them. This will
produce problems later on when processing your data.
The general advice is therefore: do not convert CFTime
objects to Date objects unless you are sure that the
CFTime
object uses a POSIXt-compatible calendar.
The degree of incompatibility for the various calendars is as follows:
- standard: Only valid for periods after 1582-10-15. The preceding period uses the Julian calendar.
- julian: Every fourth year is a leap year. Dates like 2100-02-29 and 2200-02-29 are valid.
- 365_day or noleap: No leap year exists. 2020-02-29 does not occur.
- 366_day or all_leap: All years are leap years.
- 360_day: All months have 30 days in every year. This means that 31 January, March, May, July, August, October and December never occur, while 29 and 30 February occur in every year.
One reason to convert the time dimension from different climate projection data sets is to be able to compare the data from different models and produce a multi-model ensemble. The correct procedure is to first calculate, for each data set individually, the property of interest (e.g. the average daily rainfall per month anomaly for some future period relative to a baseline period), which will typically involve aggregation to a lower resolution (such as from daily data to monthly averages), and only then combine the aggregate data from multiple data sets to compute statistically interesting properties (such as the average among models, standard deviation, etc.).
Once data is aggregated from daily or higher-resolution values to a lower temporal resolution - such as a “month” - the different calendars no longer matter. If you do need to convert averaged data (e.g. average daily precipitation in a month) to absolute data (e.g. precipitation per month), use CFfactor_units() to make sure that you apply the correct scaling factor.
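A brief sketch of that scaling step, using the factor_units() method shown earlier and a hypothetical vector of monthly averages:
t_day <- CFtime("days since 2030-01-01", "365_day", 0:364)
f_mon <- t_day$factor("month")
days_per_month <- t_day$factor_units(f_mon)  # 31 28 31 30 31 30 31 31 30 31 30 31
# monthly_average is hypothetical data aggregated per factor level
# monthly_total <- monthly_average * days_per_month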
Otherwise, there really shouldn't be any reason to convert the time
series in the data files to Date
s. Climate projection data
is virtually never compared on a day-to-day basis between different
models and neither does complex date arithmetic make much sense (such as
adding intervals) - CFtime
can support basic arithmetic by
manipulating the offsets of the CFTime
object. The
character representations that are produced are perfectly fine to use
for dimnames()
on an array or as rownames()
in
a data.frame
and these also support basic logical
operations such as "2023-02-30" < "2023-03-01"
. So ask
yourself, do you really need Date
s when working with
unprocessed climate projection data? (If so, open an issue on
GitHub).
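A small sketch of character timestamps used as dimension names; the data matrix pr is hypothetical:
t360 <- CFtime("days since 2023-01-01", "360_day", 0:59)
pr <- matrix(runif(60 * 4), nrow = 60, ncol = 4)
dimnames(pr) <- list(time = t360$as_timestamp("date"), station = paste0("st", 1:4))
"2023-02-30" %in% dimnames(pr)$time  # TRUE: a valid date in the 360_day calendar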
A complete example of creating a multi-model ensemble is provided in the vignette “Processing climate projection data”.