==> doSNOW/inst/examples/boot.R <==

suppressMessages(library(doSNOW))
cl <- makeSOCKcluster(4)
registerDoSNOW(cl)

cat(sprintf('doSNOW %s\n', packageVersion('doSNOW')))

# 'junk' is never used by the tasks; exporting it inflates the
# worker-initialization payload so the timings expose communication overhead
junk <- matrix(0, 1000000, 8)
cat(sprintf('Size of extra junk data: %d bytes\n', object.size(junk)))

x <- iris[which(iris[,5] != "setosa"), c(1,5)]
trials <- 10000

# one task per bootstrap trial, dispatched with clusterApplyLB
ptime <- system.time({
  r <- foreach(icount(trials), .combine=cbind, .export='junk') %dopar% {
    ind <- sample(100, 100, replace=TRUE)
    result1 <- glm(x[ind,2]~x[ind,1], family=binomial(logit))
    coefficients(result1)
  }
})[3]
cat(sprintf('parallel foreach: %6.1f sec\n', ptime))

# the same trials bundled into one prescheduled chunk per worker
ptime2 <- system.time({
  snowopts <- list(preschedule=TRUE)
  r <- foreach(icount(trials), .combine=cbind, .export='junk',
               .options.snow=snowopts) %dopar% {
    ind <- sample(100, 100, replace=TRUE)
    result1 <- glm(x[ind,2]~x[ind,1], family=binomial(logit))
    coefficients(result1)
  }
})[3]
cat(sprintf('parallel foreach with prescheduling: %6.1f sec\n', ptime2))

# manual chunking with idiv: one task per worker, lapply inside each task
ptime3 <- system.time({
  chunks <- getDoParWorkers()
  r <- foreach(n=idiv(trials, chunks=chunks), .combine=cbind,
               .export='junk') %dopar% {
    y <- lapply(seq_len(n), function(i) {
      ind <- sample(100, 100, replace=TRUE)
      result1 <- glm(x[ind,2]~x[ind,1], family=binomial(logit))
      coefficients(result1)
    })
    do.call('cbind', y)
  }
})[3]
cat(sprintf('chunked parallel foreach: %6.1f sec\n', ptime3))

# plain snow parLapply for comparison; mkworker forces its arguments so
# the worker closure carries x and junk along with it
ptime4 <- system.time({
  mkworker <- function(x, junk) {
    force(x)
    force(junk)
    function(i) {
      ind <- sample(100, 100, replace=TRUE)
      result1 <- glm(x[ind,2]~x[ind,1], family=binomial(logit))
      coefficients(result1)
    }
  }
  y <- parLapply(cl, seq_len(trials), mkworker(x, junk))
  r <- do.call('cbind', y)
})[3]
cat(sprintf('parLapply: %6.1f sec\n', ptime4))

# sequential baselines
stime <- system.time({
  y <- lapply(seq_len(trials), function(i) {
    ind <- sample(100, 100, replace=TRUE)
    result1 <- glm(x[ind,2]~x[ind,1], family=binomial(logit))
    coefficients(result1)
  })
  r <- do.call('cbind', y)
})[3]
cat(sprintf('sequential lapply: %6.1f sec\n', stime))

stime2 <- system.time({
  r <- foreach(icount(trials), .combine=cbind) %do% {
    ind <- sample(100, 100, replace=TRUE)
    result1 <- glm(x[ind,2]~x[ind,1], family=binomial(logit))
    coefficients(result1)
  }
})[3]
cat(sprintf('sequential foreach: %6.1f sec\n', stime2))

stopCluster(cl)
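
[Editorial note, not part of the package tarball: in boot.R above, 'junk' is
never referenced inside the loop bodies; .export='junk' forces it into the
payload sent to the workers purely to measure transfer overhead, while 'x'
is shipped automatically because the bodies reference it. A minimal sketch
of that automatic export detection; the two-worker local cluster and the
data frame 'x' are assumptions for illustration:]

library(doSNOW)
cl <- makeSOCKcluster(2)
registerDoSNOW(cl)
x <- data.frame(a = 1:4)
# .verbose=TRUE makes doSNOW report the automatically exported variables;
# 'x' is detected because the loop body references it
r <- foreach(i = 1:2, .combine = c, .verbose = TRUE) %dopar% x$a[i]
stopCluster(cl)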

==> doSNOW/NAMESPACE <==

export(registerDoSNOW)
import(foreach, iterators, snow)

==> doSNOW/R/doSNOW.R <==

#
# Copyright (c) 2008-2010, Revolution Analytics
#
# This is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA

registerDoSNOW <- function(cl) {
  setDoPar(doSNOW, cl, info)
}

info <- function(data, item) {
  switch(item,
         workers=length(data),  # XXX is this right?
         name='doSNOW',
         version=packageDescription('doSNOW', fields='Version'),
         NULL)
}

# returns a function whose closure environment contains the '...'
# arguments; doSNOW() extracts that environment to use as exportenv
makeDotsEnv <- function(...) {
  list(...)
  function() NULL
}

.doSnowGlobals <- new.env(parent=emptyenv())

workerInit <- function(expr, exportenv, packages, attach=FALSE) {
  assign('expr', expr, .doSnowGlobals)
  assign('exportenv', exportenv, .doSnowGlobals)
  exportEnv <- .doSnowGlobals$exportenv
  parent.env(exportEnv) <- globalenv()

  if (attach) {
    attach(exportEnv)
  }

  tryCatch({
    for (p in packages)
      library(p, character.only=TRUE)
    NULL  # indicates success
  },
  error=function(e) {
    # a character string indicates an error
    conditionMessage(e)
  })
}

evalWrapper <- function(args) {
  lapply(names(args),
         function(n) assign(n, args[[n]], pos=.doSnowGlobals$exportenv))
  tryCatch(eval(.doSnowGlobals$expr, envir=.doSnowGlobals$exportenv),
           error=function(e) e)
}

workerCleanup <- function() {
  if ("exportEnv" %in% search()) {
    detach(exportEnv)
  }
}

# This function takes the place of workerInit and evalWrapper when
# preschedule is enabled.  It is executed by the master via clusterApply
# such that there is a single chunked task for each worker in the
# cluster, rather than using clusterCall to initialize the workers and
# clusterApplyLB to compute the tasks one-by-one.  This strategy can be
# significantly more efficient when there are many small tasks, and is
# very similar to the default behavior of mclapply.
workerPreschedule <- function(largs, expr, exportenv, packages) {
  parent.env(exportenv) <- globalenv()
  task <- function(args) {
    lapply(names(args), function(n) assign(n, args[[n]], pos=exportenv))
    eval(expr, envir=exportenv)
  }

  tryCatch({
    # load all necessary packages
    for (p in packages)
      library(p, character.only=TRUE)
    # execute all of the tasks
    lapply(largs, task)
  },
  error=function(e) {
    # only one exception was thrown, but we don't know which one,
    # so we'll return it for all of the tasks
    lapply(seq_along(largs), function(i) e)
  })
}

# use the byte-code compiler on R >= 2.13.0; on older versions, fall back
# to an identity function that passes the expression through uncompiled
comp <- if (getRversion() < "2.13.0") {
  function(expr, ...) expr
} else {
  compiler::compile
}
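
# Editorial note (not part of the original source): the two execution
# strategies implemented by doSNOW() below.  By default, each worker is
# initialized once with clusterCall(workerInit, ...) and the tasks are
# then dispatched one at a time with clusterApplyLB(evalWrapper); with
# .options.snow=list(preschedule=TRUE), the task list is instead split
# into one chunk per worker and evaluated in a single round of
# clusterApply(workerPreschedule).  A hedged usage sketch, assuming a
# cluster has already been created and registered:
#
#   # load-balanced, one message round-trip per task:
#   foreach(i = 1:100, .combine = c) %dopar% sqrt(i)
#   # prescheduled, one chunked task per worker:
#   foreach(i = 1:100, .combine = c,
#           .options.snow = list(preschedule = TRUE)) %dopar% sqrt(i)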

doSNOW <- function(obj, expr, envir, data) {
  cl <- data
  preschedule <- FALSE
  attachExportEnv <- FALSE

  if (!inherits(obj, 'foreach'))
    stop('obj must be a foreach object')

  it <- iter(obj)
  accumulator <- makeAccum(it)

  # check for snow-specific options
  options <- obj$options$snow
  if (!is.null(options)) {
    nms <- names(options)
    recog <- nms %in% c('preschedule', 'attachExportEnv')
    if (any(!recog))
      warning(sprintf('ignoring unrecognized snow option(s): %s',
                      paste(nms[!recog], collapse=', ')), call.=FALSE)

    if (!is.null(options$preschedule)) {
      if (!is.logical(options$preschedule) ||
          length(options$preschedule) != 1) {
        warning('preschedule must be a logical value', call.=FALSE)
      } else {
        if (obj$verbose)
          cat(sprintf('bundling all tasks into %d chunks\n', length(cl)))
        preschedule <- options$preschedule
      }
    }

    if (!is.null(options$attachExportEnv)) {
      if (!is.logical(options$attachExportEnv) ||
          length(options$attachExportEnv) != 1) {
        warning('attachExportEnv must be a logical value', call.=FALSE)
      } else {
        if (obj$verbose)
          cat("attaching export environment\n")
        attachExportEnv <- options$attachExportEnv
      }
    }
  }

  # set up the parent environment by first attempting to create an
  # environment that has '...' defined in it with the appropriate values
  exportenv <- tryCatch({
    qargs <- quote(list(...))
    args <- eval(qargs, envir)
    environment(do.call(makeDotsEnv, args))
  },
  error=function(e) {
    new.env(parent=emptyenv())
  })
  noexport <- union(obj$noexport, obj$argnames)
  getexports(expr, exportenv, envir, bad=noexport)
  vars <- ls(exportenv)
  if (obj$verbose) {
    if (length(vars) > 0) {
      cat('automatically exporting the following variables',
          'from the local environment:\n')
      cat(' ', paste(vars, collapse=', '), '\n')
    } else {
      cat('no variables are automatically exported\n')
    }
  }

  # compute the list of variables to export
  export <- unique(obj$export)
  ignore <- intersect(export, vars)
  if (length(ignore) > 0) {
    warning(sprintf('already exporting variable(s): %s',
                    paste(ignore, collapse=', ')))
    export <- setdiff(export, ignore)
  }

  # add explicitly exported variables to exportenv
  if (length(export) > 0) {
    if (obj$verbose)
      cat(sprintf('explicitly exporting variable(s): %s\n',
                  paste(export, collapse=', ')))

    for (sym in export) {
      if (!exists(sym, envir, inherits=TRUE))
        stop(sprintf('unable to find variable "%s"', sym))
      val <- get(sym, envir, inherits=TRUE)
      if (is.function(val) &&
          (identical(environment(val), .GlobalEnv) ||
           identical(environment(val), envir))) {
        # Changing this function's environment to exportenv allows it to
        # access/execute any other functions defined in exportenv.  This
        # has always been done for auto-exported functions, and not
        # doing so for explicitly exported functions results in
        # functions defined in exportenv that can't call each other.
        environment(val) <- exportenv
      }
      assign(sym, val, pos=exportenv, inherits=FALSE)
    }
  }

  # compile the expression if we're using R 2.13.0 or greater
  xpr <- comp(expr, env=envir, options=list(suppressUndefined=TRUE))
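
  # Dispatch the work using one of the two strategies described above:
  # per-task load balancing by default, or one chunk per worker when
  # preschedule=TRUE.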
  if (!preschedule) {
    # send exports to workers
    r <- clusterCall(cl, workerInit, xpr, exportenv, obj$packages,
                     attachExportEnv)
    for (emsg in r) {
      if (!is.null(emsg))
        stop('worker initialization failed: ', emsg)
    }

    # execute the tasks
    argsList <- as.list(it)
    results <- clusterApplyLB(cl, argsList, evalWrapper)

    # clean up the workers
    if (attachExportEnv) {
      clusterCall(cl, workerCleanup)
    }
  } else {
    # convert the argument iterator into a list of lists, one per worker
    argsList <- splitList(as.list(it), length(cl))

    # execute the tasks
    results <- do.call(c, clusterApply(cl, argsList, workerPreschedule,
                                       xpr, exportenv, obj$packages))
  }

  # call the accumulator with all of the results
  tryCatch(accumulator(results, seq(along=results)), error=function(e) {
    cat('error calling combine function:\n')
    print(e)
  })

  # check for errors
  errorValue <- getErrorValue(it)
  errorIndex <- getErrorIndex(it)

  # throw an error or return the combined results
  if (identical(obj$errorHandling, 'stop') && !is.null(errorValue)) {
    msg <- sprintf('task %d failed - "%s"', errorIndex,
                   conditionMessage(errorValue))
    stop(simpleError(msg, call=expr))
  } else {
    getResult(it)
  }
}

==> doSNOW/MD5 <==

dc46a7e057e0e65d1a3ed92e1ee657a0 *DESCRIPTION
499b8ad43ffe1eeeb51673103cd94cc0 *NAMESPACE
a97d142e91cfc597a60264dd3fcedc8b *R/doSNOW.R
e45c2ef63f7283eb4ab97e2cc19f4adb *inst/examples/boot.R
420a7faaf5786b52c57a70069d31ca10 *man/registerDoSNOW.Rd

==> doSNOW/DESCRIPTION <==

Package: doSNOW
Type: Package
Title: Foreach parallel adaptor for the snow package
Version: 1.0.9
Author: Revolution Analytics
Maintainer: Revolution Analytics
Description: Provides a parallel backend for the %dopar% function using
        Luke Tierney's snow package.
Depends: R (>= 2.5.0), foreach(>= 1.2.0), iterators(>= 1.0.0),
        snow(>= 0.3.0), utils
Suggests: compiler
License: GPL-2
Repository: CRAN
Repository/R-Forge/Project: dosnow
Repository/R-Forge/Revision: 12
Repository/R-Forge/DateTimeStamp: 2013-10-28 18:44:35
Date/Publication: 2013-10-29 22:41:22
Packaged: 2013-10-28 19:15:26 UTC; rforge
NeedsCompilation: no

==> doSNOW/man/registerDoSNOW.Rd <==

\name{registerDoSNOW}
\alias{registerDoSNOW}
\title{registerDoSNOW}
\description{
The \code{registerDoSNOW} function is used to register the SNOW parallel
backend with the foreach package.
}
\usage{
registerDoSNOW(cl)
}
\arguments{
  \item{cl}{The cluster object to use for parallel execution.}
}
\seealso{
  \code{\link[snow]{makeCluster}}
}
\keyword{utilities}
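% Editorial addition (not part of the original help page): a minimal usage
% sketch; the two-worker local SOCK cluster is an assumption.
\examples{
\dontrun{
cl <- makeSOCKcluster(2)
registerDoSNOW(cl)
r <- foreach(i = 1:3, .combine = c) \%dopar\% sqrt(i)
stopCluster(cl)
}
}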